From 3c6b907313cca1768efa25eb3560efd569b1766f Mon Sep 17 00:00:00 2001
From: esmael
Date: Thu, 14 Dec 2023 13:20:06 -0800
Subject: [PATCH] Update //build to m114.0.5735.331.

b/313662336

Change-Id: I025cd291b47a5376c16109917831dc39a974d70b
---
 build/.gitignore | 1 +
 build/BUILD.gn | 39 +-
 build/METADATA | 20 -
 build/OWNERS | 16 +-
 build/OWNERS.setnoparent | 31 +-
 build/PRESUBMIT.py | 57 +
 build/PRESUBMIT_test.py | 43 +
 build/action_helpers.py | 126 +
 build/action_helpers_unittest.py | 87 +
 build/add_rts_filters.py | 25 +-
 build/android/AndroidManifest.xml | 8 +-
 build/android/BUILD.gn | 126 +-
 build/android/COMMON_METADATA | 1 +
 build/android/DIR_METADATA | 2 +-
 build/android/OWNERS | 5 +-
 build/android/PRESUBMIT.py | 54 +-
 build/android/adb_chrome_public_command_line | 2 +-
 build/android/adb_command_line.py | 7 +-
 build/android/adb_gdb | 64 +-
 build/android/adb_install_apk.py | 11 +-
 build/android/adb_logcat_monitor.py | 27 +-
 build/android/adb_logcat_printer.py | 46 +-
 build/android/adb_profile_chrome | 2 +-
 build/android/adb_profile_chrome_startup | 2 +-
 build/android/adb_reverse_forwarder.py | 6 +-
 .../android/adb_system_webengine_command_line | 16 +
 build/android/adb_system_webview_command_line | 2 +-
 .../android_only_explicit_jni_exports.lst | 2 +-
 build/android/android_only_jni_exports.lst | 2 +-
 build/android/apk_operations.py | 459 ++-
 build/android/apk_operations.pydeps | 8 +-
 build/android/apply_shared_preference_file.py | 4 +-
 build/android/asan_symbolize.py | 60 +-
 build/android/bytecode/BUILD.gn | 36 +-
 .../chromium/bytecode/ByteCodeProcessor.java | 2 +-
 .../chromium/bytecode/ByteCodeRewriter.java | 53 +-
 .../chromium/bytecode/ClassPathValidator.java | 7 +-
 .../EmptyOverrideGeneratorClassAdapter.java | 104 +
 .../bytecode/FragmentActivityReplacer.java | 113 +-
 .../bytecode/MethodCheckerClassAdapter.java | 144 +
 .../chromium/bytecode/MethodDescription.java | 20 +
 .../ParentMethodCheckerClassAdapter.java | 109 +
 .../chromium/bytecode/TraceEventAdder.java | 109 +
 .../bytecode/TraceEventAdderClassAdapter.java | 47 +
 .../TraceEventAdderMethodAdapter.java | 83 +
 .../java/org/chromium/bytecode/TypeUtils.java | 2 +-
 build/android/chromium_annotations.flags | 79 +
 build/android/convert_dex_profile.py | 42 +-
 build/android/convert_dex_profile_tests.py | 79 +-
 build/android/dcheck_is_off.flags | 11 +-
 build/android/devil_chromium.json | 38 +-
 build/android/devil_chromium.py | 4 +-
 build/android/diff_resource_sizes.py | 18 +-
 build/android/docs/README.md | 5 +-
 build/android/docs/android_app_bundles.md | 205 --
 build/android/docs/build_config.md | 44 +-
 .../docs/class_verification_failures.md | 22 +-
 build/android/docs/coverage.md | 16 +-
 build/android/docs/java_asserts.md | 80 +
 build/android/docs/java_optimization.md | 2 +-
 build/android/docs/java_toolchain.md | 23 +-
 build/android/docs/life_of_a_resource.md | 185 +-
 build/android/docs/lint.md | 30 +-
 build/android/download_doclava.py | 4 +-
 build/android/dump_apk_resource_strings.py | 42 +-
 build/android/emma_coverage_stats.py | 479 ----
 build/android/emma_coverage_stats_test.py | 561 ----
 build/android/envsetup.sh | 2 +-
 build/android/fast_local_dev_server.py | 26 +-
 build/android/generate_jacoco_report.py | 34 +-
 build/android/gradle/AndroidManifest.xml | 2 +-
 build/android/gradle/android.jinja | 6 +-
 build/android/gradle/generate_gradle.py | 237 +-
 build/android/gradle/gn_to_cmake.py | 689 -----
 build/android/gradle/java.jinja | 4 +-
 build/android/gradle/root.jinja | 26 +-
 build/android/gtest_apk/BUILD.gn | 2 +-
 .../NativeTestInstrumentationTestRunner.java | 2 +-
 .../build/gtest_apk/NativeTestIntent.java | 2 +-
 .../build/gtest_apk/TestStatusIntent.java | 2 +-
 .../build/gtest_apk/TestStatusReceiver.java | 2 +-
 build/android/gyp/OWNERS | 2 +
 build/android/gyp/aar.py | 42 +-
 build/android/gyp/aar.pydeps | 1 +
 build/android/gyp/aidl.py | 16 +-
 build/android/gyp/aidl.pydeps | 2 +
 build/android/gyp/allot_native_libraries.py | 5 +-
 .../android/gyp/allot_native_libraries.pydeps | 1 +
 build/android/gyp/apkbuilder.py | 144 +-
 build/android/gyp/apkbuilder.pydeps | 3 +-
 .../android/gyp/assert_static_initializers.py | 63 +-
 build/android/gyp/binary_baseline_profile.py | 57 +
 .../gyp/binary_baseline_profile.pydeps | 7 +
 build/android/gyp/bundletool.py | 15 +-
 build/android/gyp/bytecode_processor.py | 41 +-
 build/android/gyp/bytecode_processor.pydeps | 21 +
 build/android/gyp/bytecode_rewriter.py | 9 +-
 build/android/gyp/bytecode_rewriter.pydeps | 1 +
 build/android/gyp/check_flag_expectations.py | 2 +-
 .../gyp/check_flag_expectations.pydeps | 1 +
 build/android/gyp/compile_java.py | 493 ++--
 build/android/gyp/compile_java.pydeps | 16 +
 build/android/gyp/compile_kt.py | 182 ++
 build/android/gyp/compile_kt.pydeps | 33 +
 build/android/gyp/compile_resources.py | 541 ++--
 build/android/gyp/compile_resources.pydeps | 31 +-
 build/android/gyp/copy_ex.py | 26 +-
 build/android/gyp/copy_ex.pydeps | 1 +
 .../gyp/create_apk_operations_script.py | 27 +-
 build/android/gyp/create_app_bundle.py | 281 +-
 build/android/gyp/create_app_bundle.pydeps | 7 +-
 build/android/gyp/create_app_bundle_apks.py | 23 +-
 .../android/gyp/create_app_bundle_apks.pydeps | 6 +-
 .../gyp/create_bundle_wrapper_script.py | 7 +-
 .../gyp/create_bundle_wrapper_script.pydeps | 1 +
 .../android/gyp/create_java_binary_script.py | 75 +-
 .../gyp/create_java_binary_script.pydeps | 1 +
 build/android/gyp/create_r_java.py | 17 +-
 build/android/gyp/create_r_java.pydeps | 7 +-
 build/android/gyp/create_r_txt.py | 2 +-
 build/android/gyp/create_r_txt.pydeps | 6 +-
 build/android/gyp/create_size_info_files.py | 28 +-
 .../android/gyp/create_size_info_files.pydeps | 1 +
 build/android/gyp/create_stub_manifest.py | 41 +
 .../gyp/create_test_apk_wrapper_script.py | 85 +
 ... => create_test_apk_wrapper_script.pydeps} | 4 +-
 .../android/gyp/create_ui_locale_resources.py | 15 +-
 .../gyp/create_ui_locale_resources.pydeps | 7 +-
 build/android/gyp/create_unwind_table.py | 1095 +++++++
 .../android/gyp/create_unwind_table_tests.py | 1182 ++++++++
 build/android/gyp/desugar.py | 67 -
 build/android/gyp/dex.py | 384 +--
 build/android/gyp/dex.pydeps | 4 +-
 build/android/gyp/dex_jdk_libs.py | 93 -
 build/android/gyp/dex_test.py | 50 +
 build/android/gyp/dexsplitter.py | 132 -
 build/android/gyp/dist_aar.py | 70 +-
 build/android/gyp/dist_aar.pydeps | 2 +
 build/android/gyp/extract_unwind_tables.py | 26 +-
 .../gyp/extract_unwind_tables_tests.py | 93 +-
 build/android/gyp/filter_zip.py | 40 +-
 build/android/gyp/filter_zip.pydeps | 2 +
 build/android/gyp/finalize_apk.py | 4 +-
 build/android/gyp/find.py | 4 +-
 build/android/gyp/flatc_java.py | 42 +
 ...{dex_jdk_libs.pydeps => flatc_java.pydeps} | 6 +-
 build/android/gyp/gcc_preprocess.py | 12 +-
 build/android/gyp/gcc_preprocess.pydeps | 2 +
 build/android/gyp/generate_android_wrapper.py | 5 +-
 .../gyp/generate_linker_version_script.py | 36 +-
 .../gyp/generate_linker_version_script.pydeps | 1 +
 build/android/gyp/ijar.py | 9 +-
 build/android/gyp/ijar.pydeps | 1 +
 build/android/gyp/jacoco_instr.py | 65 +-
 build/android/gyp/jacoco_instr.pydeps | 2 +
 build/android/gyp/java_cpp_enum.py | 23 +-
 build/android/gyp/java_cpp_enum.pydeps | 2 +
 build/android/gyp/java_cpp_enum_tests.py | 2 +-
 build/android/gyp/java_cpp_features.py | 12 +-
 build/android/gyp/java_cpp_features.pydeps | 2 +
 build/android/gyp/java_cpp_features_tests.py | 101 +-
 build/android/gyp/java_cpp_strings.py | 8 +-
 build/android/gyp/java_cpp_strings.pydeps | 2 +
 build/android/gyp/java_cpp_strings_tests.py | 4 +-
 build/android/gyp/java_google_api_keys.py | 17 +-
 build/android/gyp/java_google_api_keys.pydeps | 1 +
 .../android/gyp/java_google_api_keys_tests.py | 4 +-
 build/android/gyp/javac_output_processor.py | 216 ++
 build/android/gyp/jetify_jar.py | 68 -
 build/android/gyp/jinja_template.py | 15 +-
 build/android/gyp/jinja_template.pydeps | 7 +-
 build/android/gyp/lint.py | 158 +-
 build/android/gyp/lint.pydeps | 1 +
 build/android/gyp/merge_manifest.py | 110 +-
 build/android/gyp/merge_manifest.pydeps | 1 +
 .../android/gyp/native_libraries_template.py | 39 -
 build/android/gyp/nocompile_test.py | 133 +-
 build/android/gyp/optimize_resources.py | 152 +
 ...itter.pydeps => optimize_resources.pydeps} | 5 +-
 build/android/gyp/prepare_resources.py | 16 +-
 build/android/gyp/prepare_resources.pydeps | 7 +-
 build/android/gyp/process_native_prebuilt.py | 5 +-
 .../gyp/process_native_prebuilt.pydeps | 1 +
 build/android/gyp/proguard.py | 503 ++--
 build/android/gyp/proguard.pydeps | 8 +-
 build/android/gyp/resources_shrinker/BUILD.gn | 15 -
 .../gyp/resources_shrinker/shrinker.py | 76 -
 .../gyp/resources_shrinker/shrinker.pydeps | 30 -
 build/android/gyp/system_image_apks.py | 62 +
 ...esugar.pydeps => system_image_apks.pydeps} | 4 +-
 .../chromium/helloworld/HelloWorldMain.java | 2 +-
 .../helloworld/HelloWorldPrinter.java | 2 +-
 .../gyp/trace_event_bytecode_rewriter.py | 50 +
 .../gyp/trace_event_bytecode_rewriter.pydeps | 7 +
 build/android/gyp/turbine.py | 121 +-
 build/android/gyp/turbine.pydeps | 27 +
 build/android/gyp/unused_resources.py | 115 +
 build/android/gyp/unused_resources.pydeps | 30 +
 build/android/gyp/util/__init__.py | 2 +-
 build/android/gyp/util/build_utils.py | 318 +--
 build/android/gyp/util/build_utils_test.py | 2 +-
 build/android/gyp/util/diff_utils.py | 17 +-
 build/android/gyp/util/jar_info_utils.py | 2 +-
 build/android/gyp/util/java_cpp_utils.py | 18 +-
 build/android/gyp/util/manifest_utils.py | 95 +-
 build/android/gyp/util/manifest_utils_test.py | 12 +-
 build/android/gyp/util/md5_check.py | 18 +-
 build/android/gyp/util/md5_check_test.py | 2 +-
 build/android/gyp/util/parallel.py | 13 +-
 build/android/gyp/util/protoresources.py | 14 +-
 build/android/gyp/util/resource_utils.py | 189 +-
 build/android/gyp/util/resource_utils_test.py | 4 +-
 build/android/gyp/util/resources_parser.py | 29 +-
 build/android/gyp/util/server_utils.py | 10 +-
 build/android/gyp/util/zipalign.py | 97 -
 build/android/gyp/validate_inputs.py | 34 +
 .../validate_static_library_dex_references.py | 8 +-
 ...idate_static_library_dex_references.pydeps | 1 +
 build/android/gyp/write_build_config.py | 833 +++---
 build/android/gyp/write_build_config.pydeps | 6 +-
 .../gyp/write_native_libraries_java.py | 48 +-
 .../gyp/write_native_libraries_java.pydeps | 2 +
 build/android/gyp/zip.py | 49 +-
 build/android/gyp/zip.pydeps | 2 +
 build/android/host_heartbeat.py | 4 +-
 build/android/incremental_install/BUILD.gn | 3 +-
 build/android/incremental_install/__init__.py | 2 +-
 .../generate_android_manifest.py | 68 +-
 .../generate_android_manifest.pydeps | 23 +-
 .../android/incremental_install/installer.py | 84 +-
 .../BootstrapApplication.java | 2 +-
 .../BootstrapInstrumentation.java | 2 +-
 .../ClassLoaderPatcher.java | 53 +-
 .../chromium/incrementalinstall/LockFile.java | 2 +-
 .../chromium/incrementalinstall/Reflect.java | 27 +-
 .../SecondInstrumentation.java | 2 +-
 .../AndroidHiddenApiBypass/BUILD.gn | 29 +
 .../AndroidHiddenApiBypass/LICENSE | 201 ++
 .../AndroidHiddenApiBypass/README.chromium | 16 +
 .../AndroidHiddenApiBypass/README.md | 84 +
 .../org/lsposed/hiddenapibypass/Helper.java | 108 +
 .../hiddenapibypass/HiddenApiBypass.java | 415 +++
 .../hiddenapibypass/library/BuildConfig.java | 9 +
 .../main/java/dalvik/system/VMRuntime.java | 9 +
 .../write_installer_json.py | 11 +-
 .../write_installer_json.pydeps | 1 +
 .../build/annotations/AlwaysInline.java | 17 +
 .../build/annotations/CheckDiscard.java | 24 +
 .../build/annotations/DoNotClassMerge.java | 20 +
 .../build/annotations/DoNotInline.java | 20 +
 .../build/annotations/DoNotStripLogs.java | 17 +
 .../annotations/IdentifierNameString.java | 35 +
 .../chromium/build/annotations/MainDex.java | 23 +
 .../build/annotations/MockedInTests.java | 17 +
 .../build/annotations/UsedByReflection.java | 22 +
 .../java/templates/BuildConfig.template | 14 +-
 .../java/templates/ProductConfig.template | 5 +-
 .../java/test/DefaultLocaleLintTest.java | 2 +-
 build/android/java/test/NewApiLintTest.java | 2 +-
 ...eChangeIncrementalJavacTestHelper.template | 18 +
 ...tureChangeIncrementalJavacTestHelper2.java | 11 +
 build/android/java/test/missing_symbol/B.java | 9 +
 .../java/test/missing_symbol/D.template | 9 +
 .../test/missing_symbol/Importer.template | 13 +
 .../java/test/missing_symbol/ImportsSubB.java | 13 +
 build/android/java/test/missing_symbol/c.jar | Bin 0 -> 393 bytes
 .../sub/BInMethodSignature.java | 13 +
 .../java/test/missing_symbol/sub/SubB.java | 9 +
 .../junit/AndroidManifest_mergetest.xml | 12 +
 build/android/junit/res/values/strings.xml | 8 +
 .../org/chromium/build/AndroidAssetsTest.java | 58 +
 .../chromium/build/IncrementalJavacTest.java | 33 +
 build/android/lighttpd_server.py | 25 +-
 .../list_class_verification_failures.py | 130 +-
 .../list_class_verification_failures_test.py | 52 +-
 build/android/list_java_targets.py | 125 +-
 build/android/main_dex_classes.flags | 11 +-
 build/android/method_count.py | 7 +-
 build/android/multidex.flags | 8 -
 build/android/native_flags/BUILD.gn | 2 +-
 build/android/native_flags/argcapture.py | 4 +-
 build/android/native_flags/empty.cc | 2 +-
 build/android/print_cipd_version.py | 46 +
 build/android/provision_devices.py | 19 +-
 build/android/pylib/__init__.py | 13 +-
 build/android/pylib/android/__init__.py | 2 +-
 .../pylib/android/logcat_symbolizer.py | 3 +-
 build/android/pylib/base/__init__.py | 2 +-
 build/android/pylib/base/base_test_result.py | 94 +-
 .../pylib/base/base_test_result_unittest.py | 4 +-
 build/android/pylib/base/environment.py | 5 +-
 .../android/pylib/base/environment_factory.py | 11 +-
 build/android/pylib/base/mock_environment.py | 4 +-
 .../android/pylib/base/mock_test_instance.py | 4 +-
 build/android/pylib/base/output_manager.py | 58 +-
 .../pylib/base/output_manager_factory.py | 9 +-
 .../pylib/base/output_manager_test_case.py | 4 +-
 build/android/pylib/base/test_collection.py | 7 +-
 build/android/pylib/base/test_exception.py | 3 +-
 build/android/pylib/base/test_instance.py | 4 +-
 .../pylib/base/test_instance_factory.py | 11 +-
 build/android/pylib/base/test_run.py | 11 +-
 build/android/pylib/base/test_run_factory.py | 5 +-
 build/android/pylib/base/test_server.py | 5 +-
 build/android/pylib/constants/__init__.py | 19 +-
 build/android/pylib/constants/host_paths.py | 4 +-
 .../pylib/constants/host_paths_unittest.py | 6 +-
 build/android/pylib/content_settings.py | 4 +-
 build/android/pylib/device/commands/BUILD.gn | 2 +-
 .../android/commands/unzip/Unzip.java | 2 +-
 build/android/pylib/device_settings.py | 3 +-
 build/android/pylib/dex/__init__.py | 2 +-
 build/android/pylib/dex/dex_parser.py | 123 +-
 build/android/pylib/gtest/__init__.py | 2 +-
 .../pylib/gtest/filter/unit_tests_disabled | 10 -
 build/android/pylib/gtest/gtest_config.py | 4 +-
 .../pylib/gtest/gtest_test_instance.py | 32 +-
 .../pylib/gtest/gtest_test_instance_test.py | 152 +-
 .../android/pylib/instrumentation/__init__.py | 2 +-
 .../instrumentation/instrumentation_parser.py | 6 +-
 .../instrumentation_parser_test.py | 6 +-
 .../instrumentation_test_instance.py | 527 ++--
 .../instrumentation_test_instance_test.py | 326 ++-
 .../pylib/instrumentation/json_perf_parser.py | 4 +-
 .../pylib/instrumentation/test_result.py | 6 +-
 build/android/pylib/junit/__init__.py | 2 +-
 .../pylib/junit/junit_test_instance.py | 22 +-
 build/android/pylib/local/__init__.py | 2 +-
 build/android/pylib/local/device/__init__.py | 2 +-
 .../local/device/local_device_environment.py | 42 +-
 .../local/device/local_device_gtest_run.py | 169 +-
 .../device/local_device_gtest_run_test.py | 63 +-
 .../local_device_instrumentation_test_run.py | 505 +++-
 ...al_device_instrumentation_test_run_test.py | 36 +-
 .../device/local_device_monkey_test_run.py | 26 +-
 .../local/device/local_device_test_run.py | 138 +-
 .../device/local_device_test_run_test.py | 53 +-
 build/android/pylib/local/emulator/OWNERS | 1 -
 .../android/pylib/local/emulator/__init__.py | 2 +-
 build/android/pylib/local/emulator/avd.py | 899 ++++--
 build/android/pylib/local/emulator/ini.py | 74 +-
 .../android/pylib/local/emulator/ini_test.py | 96 +-
 .../emulator/local_emulator_environment.py | 54 +-
 .../pylib/local/emulator/proto/__init__.py | 2 +-
 .../pylib/local/emulator/proto/avd.proto | 25 +-
 .../pylib/local/emulator/proto/avd_pb2.py | 175 +-
 .../pylib/local/local_test_server_spawner.py | 6 +-
 build/android/pylib/local/machine/__init__.py | 2 +-
 .../machine/local_machine_environment.py | 10 +-
 .../machine/local_machine_junit_test_run.py | 290 +-
 .../local_machine_junit_test_run_test.py | 20 +-
 .../pylib/monkey/monkey_test_instance.py | 5 +-
 build/android/pylib/output/__init__.py | 2 +-
 .../pylib/output/local_output_manager.py | 15 +-
 .../pylib/output/local_output_manager_test.py | 4 +-
 .../pylib/output/noop_output_manager.py | 8 +-
 .../pylib/output/noop_output_manager_test.py | 4 +-
 .../pylib/output/remote_output_manager.py | 9 +-
 .../output/remote_output_manager_test.py | 4 +-
 build/android/pylib/pexpect.py | 2 +-
 build/android/pylib/restart_adbd.sh | 2 +-
 build/android/pylib/results/__init__.py | 2 +-
 .../results/flakiness_dashboard/__init__.py | 2 +-
 .../json_results_generator.py | 83 +-
 .../json_results_generator_unittest.py | 21 +-
 .../flakiness_dashboard/results_uploader.py | 34 +-
 build/android/pylib/results/json_results.py | 6 +-
 .../pylib/results/json_results_test.py | 150 +-
 .../pylib/results/presentation/__init__.py | 2 +-
 .../presentation/javascript/main_html.js | 2 +-
 .../presentation/standard_gtest_merge.py | 22 +-
 .../presentation/test_results_presentation.py | 123 +-
 .../test_results_presentation.pydeps | 46 +
 build/android/pylib/results/report_results.py | 25 +-
 build/android/pylib/symbols/apk_lib_dump.py | 61 -
 .../android/pylib/symbols/apk_native_libs.py | 419 ---
 .../pylib/symbols/apk_native_libs_unittest.py | 396 ---
 build/android/pylib/symbols/deobfuscator.py | 193 +-
 build/android/pylib/symbols/elf_symbolizer.py | 487 ----
 .../pylib/symbols/elf_symbolizer_unittest.py | 196 --
 .../symbols/expensive_line_transformer.py | 233 ++
 .../symbols/mock_addr2line/mock_addr2line | 5 +-
 .../android/pylib/symbols/stack_symbolizer.py | 65 +-
 build/android/pylib/symbols/symbol_utils.py | 814 ------
 .../pylib/symbols/symbol_utils_unittest.py | 942 ------
 build/android/pylib/utils/app_bundle_utils.py | 96 +-
 build/android/pylib/utils/argparse_utils.py | 16 +-
 .../android/pylib/utils/chrome_proxy_utils.py | 6 +-
 .../pylib/utils/chrome_proxy_utils_test.py | 12 +-
 build/android/pylib/utils/decorators.py | 2 +-
 build/android/pylib/utils/decorators_test.py | 28 +-
 .../pylib/utils/device_dependencies.py | 27 +-
 .../pylib/utils/device_dependencies_test.py | 32 +-
 build/android/pylib/utils/dexdump.py | 225 +-
 build/android/pylib/utils/dexdump_test.py | 264 +-
 build/android/pylib/utils/gold_utils.py | 6 +-
 build/android/pylib/utils/gold_utils_test.py | 8 +-
 .../pylib/utils/google_storage_helper.py | 14 +-
 .../pylib/utils/instrumentation_tracing.py | 8 +-
 build/android/pylib/utils/local_utils.py | 2 +-
 build/android/pylib/utils/logdog_helper.py | 10 +-
 build/android/pylib/utils/logging_utils.py | 30 +-
 build/android/pylib/utils/maven_downloader.py | 12 +-
 build/android/pylib/utils/proguard.py | 285 --
 build/android/pylib/utils/proguard_test.py | 495 ----
 build/android/pylib/utils/repo_utils.py | 8 +-
 .../pylib/utils/shared_preference_utils.py | 49 +-
 build/android/pylib/utils/simpleperf.py | 47 +-
 build/android/pylib/utils/test_filter.py | 78 +-
 build/android/pylib/utils/test_filter_test.py | 125 +-
 build/android/pylib/utils/time_profile.py | 4 +-
 build/android/pylib/utils/xvfb.py | 4 +-
 build/android/pylib/valgrind_tools.py | 15 +-
 build/android/resource_sizes.gni | 16 +-
 build/android/resource_sizes.py | 147 +-
 build/android/resource_sizes.pydeps | 5 +-
 build/android/screenshot.py | 4 +-
 build/android/stacktrace/BUILD.gn | 12 +-
 build/android/stacktrace/README.md | 4 +-
 .../stacktrace/crashpad_stackwalker.py | 4 +-
 .../org/chromium/build/FlushingReTrace.java | 64 +-
 build/android/stacktrace/java_deobfuscate.jar | Bin 3113 -> 0 bytes
 build/android/stacktrace/java_deobfuscate.py | 11 +-
 .../stacktrace/java_deobfuscate_java.jar | Bin 0 -> 7643 bytes
 .../stacktrace/java_deobfuscate_test.py | 14 +-
 build/android/stacktrace/stackwalker.py | 5 +-
 build/android/test/BUILD.gn | 99 +-
 .../test/incremental_javac_gn/BUILD.gn | 98 +
 .../incremental_javac_test_android_library.py | 154 +
 build/android/test/missing_symbol_test.gni | 57 +
 build/android/test/nocompile_gn/BUILD.gn | 58 +-
 .../test/nocompile_gn/nocompile_sources.gni | 8 +-
 build/android/test_runner.py | 310 +-
 build/android/test_runner.pydeps | 27 +-
 build/android/test_wrapper/logdog_wrapper.py | 43 +-
 build/android/tests/symbolize/Makefile | 2 +-
 build/android/tests/symbolize/a.cc | 2 +-
 build/android/tests/symbolize/b.cc | 2 +-
 build/android/tombstones.py | 6 +-
 build/android/unused_resources/BUILD.gn | 19 +
 .../UnusedResources.java} | 82 +-
 .../update_deps/update_third_party_deps.py | 4 +-
 build/android/update_verification.py | 15 +-
 build/android/video_recorder.py | 4 +-
 build/apple/apple_info_plist.gni | 2 +-
 build/apple/compile_entitlements.gni | 2 +-
 build/apple/compile_plist.gni | 2 +-
 build/apple/convert_plist.gni | 2 +-
 build/apple/plist_util.py | 57 +-
 build/apple/tweak_info_plist.gni | 2 +-
 build/apple/tweak_info_plist.py | 53 +-
 build/apple/write_pkg_info.py | 10 +-
 build/apple/xcrun.py | 4 +-
 build/args/README.txt | 4 -
 build/args/chromeos/README.md | 7 +-
 build/args/headless.gn | 10 +-
 build/build-ctags.sh | 2 +-
 build/build_config.h | 197 +-
 build/buildflag.h | 2 +-
 build/buildflag_header.gni | 2 +-
 build/check_gn_headers.py | 15 +-
 build/check_gn_headers_unittest.py | 10 +-
 build/check_gn_headers_whitelist.txt | 52 +-
 build/check_return_value.py | 5 +-
 build/chromeos/.style.yapf | 2 +-
 build/chromeos/PRESUBMIT.py | 24 +-
 build/chromeos/generate_skylab_deps.py | 206 ++
 build/chromeos/generate_skylab_deps_test.py | 178 ++
 build/chromeos/test_runner.py | 340 +--
 build/chromeos/test_runner_test.py | 65 +-
 build/cipd/cipd.gni | 4 +-
 build/cipd/cipd_from_file.py | 2 +-
 build/cipd/clobber_cipd_root.py | 33 -
 build/clobber.py | 61 +-
 build/clobber_unittest.py | 148 +
 build/compiled_action.gni | 2 +-
 build/compute_build_timestamp.py | 21 +-
 build/config/BUILD.gn | 67 +-
 build/config/BUILDCONFIG.gn | 203 +-
 build/config/OWNERS | 5 +-
 build/config/aix/BUILD.gn | 14 +-
 build/config/android/BUILD.gn | 89 +-
 build/config/android/DIR_METADATA | 1 +
 build/config/android/abi.gni | 15 +-
 build/config/android/android_nocompile.gni | 40 +-
 build/config/android/build_vars.gni | 4 +-
 build/config/android/channel.gni | 2 +-
 build/config/android/config.gni | 187 +-
 build/config/android/copy_ex.gni | 2 +-
 build/config/android/create_unwind_table.gni | 50 +
 .../config/android/extract_unwind_tables.gni | 55 +-
 build/config/android/internal_rules.gni | 2303 ++++++++-------
 .../config/android/linker_version_script.gni | 11 +-
 build/config/android/rules.gni | 2528 ++++++++++-------
 build/config/android/sdk.gni | 9 +-
 build/config/android/system_image.gni | 174 ++
 .../android/test/classpath_order/BUILD.gn | 111 -
 .../java/res_template/values/values.xml | 9 -
 .../classpath_order/ClassPathOrderTest.java | 32 -
 .../build/classpath_order/Dummy.java.jinja2 | 8 -
 build/config/android/test/proto/BUILD.gn | 2 +-
 .../proto/absolute_dep/absolute_dep.proto | 2 +-
 .../proto/relative_dep/relative_dep.proto | 2 +-
 .../test/proto/root/absolute_child.proto | 2 +-
 .../test/proto/root/absolute_root.proto | 2 +-
 .../test/proto/root/relative_child.proto | 2 +-
 .../test/proto/root/relative_root.proto | 2 +-
 .../android/test/resource_overlay/BUILD.gn | 6 +-
 .../java/res_template/values/values.xml | 2 +-
 .../resource_overlay/ResourceOverlayTest.java | 4 +-
 build/config/apple/BUILD.gn | 17 +
 build/config/apple/sdk_info.py | 82 +-
 build/config/apple/symbols.gni | 4 +-
 build/config/arm.gni | 46 +-
 build/config/buildflags_paint_preview.gni | 14 +-
 build/config/c++/BUILD.gn | 92 +-
 build/config/c++/c++.gni | 23 +-
 build/config/c++/libc++.natvis | 164 +-
 build/config/chrome_build.gni | 47 +-
 build/config/chromebox_for_meetings/BUILD.gn | 11 +
 build/config/chromebox_for_meetings/OWNERS | 1 +
 build/config/chromebox_for_meetings/README.md | 31 +
 .../chromebox_for_meetings/buildflags.gni | 8 +
 build/config/chromecast/BUILD.gn | 5 +-
 build/config/chromecast/OWNERS | 3 +
 build/config/chromecast_build.gni | 99 +-
 build/config/chromeos/BUILD.gn | 50 +-
 build/config/chromeos/args.gni | 16 +-
 build/config/chromeos/rules.gni | 300 +-
 build/config/chromeos/ui_mode.gni | 39 +-
 build/config/clang/BUILD.gn | 36 +-
 build/config/clang/clang.gni | 16 +-
 build/config/compiler/BUILD.gn | 1156 +++++---
 build/config/compiler/compiler.gni | 139 +-
 build/config/compiler/pgo/BUILD.gn | 67 +-
 build/config/compiler/pgo/pgo.gni | 17 +-
 build/config/compute_inputs_for_analyze.gni | 2 +-
 build/config/coverage/BUILD.gn | 19 +-
 build/config/coverage/OWNERS | 4 +-
 build/config/coverage/coverage.gni | 11 +-
 build/config/cronet/OWNERS | 1 +
 build/config/cronet/config.gni | 10 +
 build/config/crypto.gni | 15 -
 build/config/dcheck_always_on.gni | 31 +-
 build/config/devtools.gni | 37 +
 build/config/features.gni | 17 +-
 build/config/freetype/BUILD.gn | 2 +-
 build/config/freetype/freetype.gni | 2 +-
 build/config/fuchsia/BUILD.gn | 92 +-
 build/config/fuchsia/DIR_METADATA | 8 +-
 build/config/fuchsia/OWNERS | 5 +-
 .../fuchsia/add_DebugData_service.test-cmx | 7 -
 .../config/fuchsia/build_cmx_from_fragment.py | 49 -
 build/config/fuchsia/build_symbol_archive.py | 4 +-
 build/config/fuchsia/config.gni | 7 +-
 build/config/fuchsia/extend_fvm.py | 2 +-
 .../fuchsia/fuchsia_package_metadata.gni | 38 +
 .../fuchsia/generate_runner_scripts.gni | 365 ++-
 build/config/fuchsia/gfx_tests.cmx | 30 -
 build/config/fuchsia/package.gni | 114 -
 ...ackaged_content_embedder_excluded_dirs.gni | 16 +
 build/config/fuchsia/rules.gni | 5 -
 .../size_optimized_cast_receiver_args.gn | 43 +
 ...e_optimized_cast_receiver_args_internal.gn | 18 +
 build/config/fuchsia/sizes.gni | 11 +-
 build/config/fuchsia/symbol_archive.gni | 4 +-
 build/config/fuchsia/test/OWNERS | 8 +-
 build/config/fuchsia/test/README.md | 113 +-
 .../test/access_test_data_dir.test-cmx | 7 -
 .../fuchsia/test/archivist.shard.test-cml | 28 +
 .../fuchsia/test/audio_capabilities.test-cmx | 18 -
 .../fuchsia/test/audio_output.shard.test-cml | 16 +
 .../chromium_system_test_facet.shard.test-cml | 8 +
 .../test/chromium_test_facet.shard.test-cml | 8 +
 .../test/context_provider.shard.test-cml | 30 +
 ...lf_test_ambient_exec_runner.shard.test-cml | 17 +
 .../test/elf_test_runner.shard.test-cml | 17 +
 .../fuchsia/test/font_capabilities.test-cmx | 14 -
 .../config/fuchsia/test/fonts.shard.test-cml | 38 +
 .../test/gfx_test_ui_stack.shard.test-cml | 49 +
 .../fuchsia/test/jit_capabilities.test-cmx | 7 -
 .../config/fuchsia/test/logger.shard.test-cml | 8 +
 .../test/mark_vmo_executable.shard.test-cml | 12 +
 .../fuchsia/test/minimum.shard.test-cml | 78 +
 .../test/minimum_capabilities.test-cmx | 29 -
 .../fuchsia/test/network.shard.test-cml | 20 +
 .../test/network_capabilities.test-cmx | 25 -
 .../test/platform_video_codecs.shard.test-cml | 48 +
 .../fuchsia/test/present_view.shard.test-cml | 42 +
 .../test/present_view_capabilities.test-cmx | 24 -
 .../fuchsia/test/read_debug_data.test-cmx | 7 -
 .../config/fuchsia/test/sysmem.shard.test-cml | 10 +
 .../test/system_test_minimum.shard.test-cml | 46 +
 .../fuchsia/test/test_fonts.shard.test-cml | 37 +
 .../test/test_logger_capabilities.test-cmx | 7 -
 .../fuchsia/test/test_ui_stack.shard.test-cml | 48 +
 .../fuchsia/test/vulkan_capabilities.test-cmx | 19 -
 .../web_engine_required_capabilities.test-cmx | 25 -
 .../fuchsia/test/web_instance.shard.test-cml | 21 +
 build/config/gcc/BUILD.gn | 7 +-
 build/config/get_host_byteorder.py | 5 +-
 build/config/host_byteorder.gni | 2 +-
 build/config/ios/BUILD.gn | 128 +-
 build/config/ios/Host-Info.plist | 2 +-
 build/config/ios/Module-Info.plist | 2 +-
 build/config/ios/asset_catalog.gni | 110 +-
 .../config/ios/bundle_data_from_filelist.gni | 24 +
 build/config/ios/codesign.py | 53 +-
 build/config/ios/compile_ib_files.py | 3 +-
 .../config/ios/compile_xcassets_unittests.py | 2 +-
 build/config/ios/config.gni | 2 +-
 build/config/ios/dummy.py | 2 +-
 build/config/ios/find_signing_identity.py | 11 +-
 build/config/ios/generate_umbrella_header.py | 2 +-
 build/config/ios/hardlink.py | 2 +-
 build/config/ios/ios_sdk.gni | 70 +-
 build/config/ios/ios_sdk_overrides.gni | 6 +-
 build/config/ios/ios_test_runner_wrapper.gni | 23 +-
 build/config/ios/ios_test_runner_xcuitest.gni | 72 +
 .../resources/XCTRunnerAddition+Info.plist | 2 +-
 build/config/ios/rules.gni | 1318 ++++-----
 build/config/ios/strip_arm64e.py | 2 +-
 build/config/ios/swift_source_set.gni | 25 +
 build/config/ios/write_framework_hmap.py | 3 +-
 build/config/ios/write_framework_modulemap.py | 2 +-
 build/config/ios/xctest_shell.mm | 2 +-
 build/config/linux/BUILD.gn | 30 +-
 build/config/linux/atk/BUILD.gn | 7 +-
 build/config/linux/atspi2/BUILD.gn | 4 +-
 build/config/linux/dbus/BUILD.gn | 2 +-
 build/config/linux/dri/BUILD.gn | 6 +-
 build/config/linux/gtk/BUILD.gn | 33 +-
 build/config/linux/gtk/gtk.gni | 8 +-
 build/config/linux/libdrm/BUILD.gn | 4 +-
 build/config/linux/libffi/BUILD.gn | 16 +-
 build/config/linux/libva/BUILD.gn | 2 +-
 build/config/linux/nss/BUILD.gn | 14 +-
 build/config/linux/pangocairo/BUILD.gn | 2 +-
 build/config/linux/pangocairo/pangocairo.gni | 4 +-
 build/config/linux/pkg-config.py | 5 +-
 build/config/linux/pkg_config.gni | 9 +-
 build/config/locales.gni | 365 ++-
 build/config/logging.gni | 22 +-
 build/config/mac/BUILD.gn | 37 +-
 build/config/mac/mac_sdk.gni | 45 +-
 build/config/mac/mac_sdk_overrides.gni | 2 +-
 build/config/mac/package_framework.py | 2 +-
 build/config/mac/prepare_framework_version.py | 2 +-
 build/config/mac/rules.gni | 19 +-
 build/config/mips.gni | 2 +-
 build/config/nacl/BUILD.gn | 37 +-
 build/config/nacl/config.gni | 24 +-
 build/config/nacl/host_toolchain.gni | 18 +
 build/config/nacl/rules.gni | 54 +-
 build/config/ozone.gni | 40 +-
 build/config/ozone_extra.gni | 11 +-
 build/config/pch.gni | 5 +-
 build/config/posix/BUILD.gn | 23 +-
 build/config/posix/sysroot_ld_path.py | 24 -
 build/config/profiling/OWNERS | 4 +-
 build/config/profiling/profiling.gni | 2 +-
 build/config/python.gni | 114 +-
 build/config/riscv.gni | 19 +
 build/config/rust.gni | 310 ++
 build/config/sanitizers/BUILD.gn | 165 +-
 build/config/sanitizers/OWNERS | 2 -
 build/config/sanitizers/sanitizers.gni | 92 +-
 build/config/siso/.gitignore | 1 +
 build/config/siso/OWNERS | 6 +
 build/config/siso/README.md | 8 +
 build/config/siso/clang_linux.star | 109 +
 build/config/siso/configure_siso.py | 36 +
 build/config/siso/linux.star | 43 +
 build/config/siso/mac.star | 23 +
 build/config/siso/main.star | 47 +
 build/config/siso/mojo.star | 129 +
 build/config/siso/nacl_linux.star | 179 ++
 build/config/siso/remote_exec_wrapper.star | 58 +
 build/config/siso/simple.star | 46 +
 build/config/siso/windows.star | 23 +
 build/config/sysroot.gni | 16 +-
 build/config/ui.gni | 25 +-
 build/config/v8_target_cpu.gni | 5 +-
 build/config/win/BUILD.gn | 132 +-
 build/config/win/console_app.gni | 2 +-
 build/config/win/control_flow_guard.gni | 4 +-
 build/config/win/manifest.gni | 2 +-
 build/config/win/visual_studio_version.gni | 11 +-
 build/config/x64.gni | 24 -
 build/config/zip.gni | 13 +-
 build/config/zos/BUILD.gn | 57 +
 build/copy_test_data_ios.py | 5 +-
 build/cp.py | 4 +-
 build/del_ninja_deps_cache.py | 40 +
 build/detect_host_arch.py | 5 +-
 build/dir_exists.py | 4 +-
 build/docs/debugging_slow_builds.md | 49 +-
 build/docs/writing_gn_templates.md | 25 +-
 build/dotfile_settings.gni | 10 +-
 build/download_nacl_toolchains.py | 5 +-
 build/env_dump.py | 4 +-
 build/extract_from_cab.py | 5 +-
 build/extract_partition.py | 163 +-
 build/find_depot_tools.py | 5 +-
 build/fix_gn_headers.py | 5 +-
 build/fuchsia/COMMON_METADATA | 5 +
 build/fuchsia/DIR_METADATA | 6 +-
 build/fuchsia/OWNERS | 18 +-
 build/fuchsia/PRESUBMIT.py | 47 +
 build/fuchsia/SECURITY_OWNERS | 16 +
 build/fuchsia/aemu_target.py | 126 -
 build/fuchsia/amber_repo.py | 172 --
 build/fuchsia/binary_size_differ.py | 153 +
 build/fuchsia/binary_size_differ_test.py | 171 ++
 build/fuchsia/binary_sizes.py | 106 +-
 build/fuchsia/binary_sizes_test.py | 139 +-
 build/fuchsia/boot_data.py | 114 -
 build/fuchsia/cipd/BUILD.gn | 436 +++
 build/fuchsia/cipd/DIR_METADATA | 1 +
 build/fuchsia/cipd/README.md | 11 +
 build/fuchsia/cipd/version.template | 1 +
 build/fuchsia/common.py | 140 -
 build/fuchsia/common_args.py | 173 --
 build/fuchsia/deploy_to_amber_repo.py | 66 -
 build/fuchsia/device_target.py | 280 --
 build/fuchsia/emu_target.py | 142 -
 build/fuchsia/gcs_download.py | 51 +
 build/fuchsia/gcs_download_test.py | 88 +
 build/fuchsia/generic_x64_target.py | 99 -
 build/fuchsia/linux.sdk.sha1 | 1 -
 build/fuchsia/linux_internal.sdk.sha1 | 1 +
 build/fuchsia/mac.sdk.sha1 | 1 -
 build/fuchsia/net_test_server.py | 90 -
 build/fuchsia/qemu_image.py | 75 -
 build/fuchsia/qemu_target.py | 255 --
 build/fuchsia/qemu_target_test.py | 58 -
 build/fuchsia/remote_cmd.py | 131 -
 build/fuchsia/run_test_package.py | 278 --
 build/fuchsia/runner_exceptions.py | 78 -
 build/fuchsia/runner_logs.py | 96 -
 build/fuchsia/symbolizer.py | 70 -
 build/fuchsia/target.py | 322 ---
 build/fuchsia/test/.coveragerc | 8 +
 build/fuchsia/test/.style.yapf | 2 +
 build/fuchsia/test/OWNERS | 3 +
 build/fuchsia/test/PRESUBMIT.py | 51 +
 build/fuchsia/test/base_ermine_ctl.py | 201 ++
 .../fuchsia/test/base_ermine_ctl_unittests.py | 236 ++
 build/fuchsia/test/common.py | 617 ++++
 build/fuchsia/test/common_unittests.py | 54 +
 build/fuchsia/test/compatible_utils.py | 207 ++
 .../test/compatible_utils_unittests.py | 238 ++
 build/fuchsia/test/coveragetest.py | 59 +
 build/fuchsia/test/deploy_to_fuchsia.py | 44 +
 .../test/deploy_to_fuchsia_unittests.py | 38 +
 build/fuchsia/test/ermine_ctl.py | 25 +
 build/fuchsia/test/ffx_emulator.py | 162 ++
 build/fuchsia/test/ffx_emulator_unittests.py | 49 +
 build/fuchsia/test/ffx_integration.py | 236 ++
 build/fuchsia/test/flash_device.py | 243 ++
 build/fuchsia/test/flash_device_unittests.py | 349 +++
 build/fuchsia/test/lockfile.py | 79 +
 build/fuchsia/test/log_manager.py | 160 ++
 build/fuchsia/test/log_manager_unittests.py | 115 +
 build/fuchsia/test/publish_package.py | 68 +
 .../fuchsia/test/publish_package_unittests.py | 103 +
 build/fuchsia/test/pylintrc | 26 +
 build/fuchsia/test/run_blink_test.py | 36 +
 build/fuchsia/test/run_executable_test.py | 263 ++
 build/fuchsia/test/run_pytype.py | 42 +
 build/fuchsia/test/run_telemetry_test.py | 61 +
 build/fuchsia/test/run_test.py | 127 +
 build/fuchsia/test/run_webpage_test.py | 60 +
 build/fuchsia/test/serve_repo.py | 98 +
 build/fuchsia/test/serve_repo_unittests.py | 89 +
 build/fuchsia/test/start_emulator.py | 83 +
 build/fuchsia/test/test_runner.py | 74 +
 build/fuchsia/test/test_server.py | 130 +
 build/fuchsia/test/test_server_unittests.py | 84 +
 build/fuchsia/test_runner.py | 257 --
 build/fuchsia/update_images.py | 214 +-
 build/fuchsia/update_images_test.py | 97 +
 build/fuchsia/update_product_bundles.py | 359 +++
 build/fuchsia/update_product_bundles_test.py | 288 ++
 build/fuchsia/update_sdk.py | 197 +-
 build/fuchsia/update_sdk_test.py | 69 +
 build/gdb-add-index | 2 +-
 build/get_landmines.py | 8 +-
 build/get_symlink_targets.py | 4 +-
 build/gn_helpers.py | 2 +-
 build/gn_helpers_unittest.py | 3 +-
 build/gn_logs.gni | 2 +-
 build/gn_run_binary.py | 3 +-
 build/install-build-deps-android.sh | 40 -
 build/install-build-deps.sh | 309 +-
 build/install-chroot.sh | 2 +-
 build/ios/PRESUBMIT.py | 20 +
 build/ios/extension_bundle_data.gni | 23 +
 build/ios/intent_definition.gni | 2 +-
 build/ios/presubmit_support.py | 39 +
 build/ios/presubmit_support_test.py | 165 ++
 build/ios/test_data/bar.html | 0
 build/ios/test_data/basic.filelist | 7 +
 build/ios/test_data/basic.globlist | 5 +
 build/ios/test_data/comment.filelist | 2 +
 build/ios/test_data/comment.globlist | 7 +
 .../test_data/different_local_path.filelist | 9 +
 .../test_data/different_local_path.globlist | 6 +
 build/ios/test_data/duplicates.filelist | 7 +
 build/ios/test_data/duplicates.globlist | 7 +
 build/ios/test_data/exclusions.filelist | 9 +
 build/ios/test_data/exclusions.globlist | 6 +
 build/ios/test_data/extra.filelist | 8 +
 build/ios/test_data/extra.globlist | 5 +
 build/ios/test_data/foo.css | 0
 .../ignore_outside_globlist_dir.filelist | 8 +
 .../ignore_outside_globlist_dir.globlist | 8 +
 build/ios/test_data/missing.filelist | 9 +
 build/ios/test_data/missing.globlist | 8 +
 .../test_data/outside_globlist_dir.filelist | 8 +
 .../test_data/outside_globlist_dir.globlist | 6 +
 build/ios/test_data/reorder.filelist | 9 +
 build/ios/test_data/reorder.globlist | 6 +
 .../test_data/repository_relative.filelist | 9 +
 .../test_data/repository_relative.globlist | 6 +
 build/ios/test_data/subdirectory/baz.txt | 0
 build/ios/update_bundle_filelist.py | 318 +++
 build/lacros/BUILD.gn | 24 +-
 build/lacros/OWNERS | 1 -
 build/lacros/PRESUBMIT.py | 14 +-
 build/lacros/README.md | 11 +
 build/lacros/lacros_resource_sizes.gni | 2 +-
 build/lacros/lacros_resource_sizes.py | 77 +-
 build/lacros/lacros_resource_sizes.pydeps | 4 +
 .../lacros/mojo_connection_lacros_launcher.py | 109 +-
 build/lacros/test_runner.py | 598 +++-
 build/lacros/test_runner_test.py | 139 +-
 build/landmine_utils.py | 2 +-
 build/landmines.py | 4 +-
 build/linux/BUILD.gn | 5 +-
 build/linux/chrome.map | 8 +
 build/linux/dump_app_syms.py | 5 +-
 build/linux/extract_symbols.gni | 3 +-
 build/linux/install-chromeos-fonts.py | 5 +-
 build/linux/libpci/BUILD.gn | 2 +-
 build/linux/libudev/BUILD.gn | 3 +-
 build/linux/rewrite_dirs.py | 5 +-
 build/linux/strip_binary.gni | 22 +-
 build/linux/strip_binary.py | 6 +-
 build/linux/sysroot_ld_path.sh | 99 -
 .../linux/sysroot_scripts/build_and_upload.py | 24 +-
 .../find_incompatible_glibc_symbols.py | 63 -
 ...ind_incompatible_glibc_symbols_unittest.py | 41 -
 ...ve_unstable_gpg.sh => generate_keyring.sh} | 14 +-
 .../generated_package_lists/bullseye.amd64 | 411 +++
 .../generated_package_lists/bullseye.arm | 411 +++
 .../generated_package_lists/bullseye.arm64 | 414 +++
 .../generated_package_lists/bullseye.armel | 409 +++
 .../generated_package_lists/bullseye.i386 | 409 +++
 .../generated_package_lists/bullseye.mips64el | 404 +++
 .../generated_package_lists/bullseye.mipsel | 403 +++
 .../generated_package_lists/sid.amd64 | 372 ---
 .../generated_package_lists/sid.arm | 368 ---
 .../generated_package_lists/sid.arm64 | 371 ---
 .../generated_package_lists/sid.armel | 367 ---
 .../generated_package_lists/sid.i386 | 368 ---
 .../generated_package_lists/sid.mips64el | 359 ---
 .../generated_package_lists/sid.mipsel | 359 ---
 .../linux/sysroot_scripts/install-sysroot.py | 33 +-
 ...ebian_archive_unstable.gpg => keyring.gpg} | Bin 81498 -> 94381 bytes
 .../linux/sysroot_scripts/libdbus-1-3-symbols | 235 --
 .../sysroot_scripts/libxkbcommon0-symbols | 93 -
 .../sysroot_scripts/merge-package-lists.py | 6 +-
 .../linux/sysroot_scripts/reversion_glibc.py | 124 +
 ...tor-sid.sh => sysroot-creator-bullseye.sh} | 158 +-
 .../linux/sysroot_scripts/sysroot-creator.sh | 221 +-
 build/linux/sysroot_scripts/sysroots.json | 63 +-
 .../update-archive-timestamp.sh | 2 +-
 build/linux/unbundle/absl_algorithm.gn | 22 +
 build/linux/unbundle/absl_base.gn | 67 +
 build/linux/unbundle/absl_cleanup.gn | 20 +
 build/linux/unbundle/absl_container.gn | 119 +
 build/linux/unbundle/absl_debugging.gn | 47 +
 build/linux/unbundle/absl_flags.gn | 50 +
 build/linux/unbundle/absl_functional.gn | 49 +
 build/linux/unbundle/absl_hash.gn | 22 +
 build/linux/unbundle/absl_log.gn | 13 +
 build/linux/unbundle/absl_log_internal.gn | 1 +
 build/linux/unbundle/absl_memory.gn | 20 +
 build/linux/unbundle/absl_meta.gn | 20 +
 build/linux/unbundle/absl_numeric.gn | 32 +
 build/linux/unbundle/absl_random.gn | 17 +
 build/linux/unbundle/absl_status.gn | 38 +
 build/linux/unbundle/absl_strings.gn | 93 +
 build/linux/unbundle/absl_synchronization.gn | 22 +
 build/linux/unbundle/absl_time.gn | 21 +
 build/linux/unbundle/absl_types.gn | 97 +
 build/linux/unbundle/absl_utility.gn | 17 +
 build/linux/unbundle/brotli.gn | 35 +
 build/linux/unbundle/crc32c.gn | 11 +
 build/linux/unbundle/dav1d.gn | 23 +
 build/linux/unbundle/double-conversion.gn | 23 +
 build/linux/unbundle/ffmpeg.gn | 3 +-
 build/linux/unbundle/flac.gn | 2 +-
 build/linux/unbundle/fontconfig.gn | 2 +-
 build/linux/unbundle/freetype.gn | 2 +-
 build/linux/unbundle/harfbuzz-ng.gn | 2 +-
 build/linux/unbundle/icu.gn | 2 +-
 build/linux/unbundle/jsoncpp.gn | 32 +
 build/linux/unbundle/libXNVCtrl.gn | 19 +
 build/linux/unbundle/libaom.gn | 34 +
 build/linux/unbundle/libavif.gn | 16 +
 build/linux/unbundle/libdrm.gn | 2 +-
 build/linux/unbundle/libevent.gn | 2 +-
 build/linux/unbundle/libjpeg.gn | 2 +-
 build/linux/unbundle/libpng.gn | 2 +-
 build/linux/unbundle/libvpx.gn | 2 +-
 build/linux/unbundle/libwebp.gn | 4 +-
 build/linux/unbundle/libxml.gn | 9 +-
 build/linux/unbundle/libxslt.gn | 2 +-
 build/linux/unbundle/libyuv.gn | 37 +
 build/linux/unbundle/openh264.gn | 4 +-
 build/linux/unbundle/opus.gn | 2 +-
 build/linux/unbundle/re2.gn | 2 +-
 .../unbundle/remove_bundled_libraries.py | 6 +-
 build/linux/unbundle/replace_gn_files.py | 48 +-
 build/linux/unbundle/snappy.gn | 2 +-
 .../unbundle/swiftshader-SPIRV-Headers.gn | 17 +
 .../linux/unbundle/swiftshader-SPIRV-Tools.gn | 32 +
 build/linux/unbundle/vulkan-SPIRV-Headers.gn | 19 +
 build/linux/unbundle/vulkan-SPIRV-Tools.gn | 69 +
 build/linux/unbundle/woff2.gn | 20 +
 build/linux/unbundle/zlib.gn | 2 +-
 build/locale_tool.py | 32 +-
 build/mac/find_sdk.py | 29 +-
 build/mac/should_use_hermetic_xcode.py | 3 +-
 build/mac_toolchain.py | 30 +-
 build/{lacros => }/metadata.json.in | 0
 build/nocompile.gni | 26 +-
 build/noop.py | 2 +-
 build/partitioned_shared_library.gni | 7 +-
 build/precompile.cc | 2 +-
 build/precompile.h | 2 +-
 build/print_python_deps.py | 95 +-
 build/private_code_test/BUILD.gn | 47 +
 build/private_code_test/README.md | 36 +
 build/private_code_test/list_gclient_deps.py | 43 +
 build/private_code_test/private_code_test.gni | 63 +
 build/private_code_test/private_code_test.py | 135 +
 build/protoc_java.py | 31 +-
 build/protoc_java.pydeps | 2 +
 build/redirect_stdout.py | 13 +-
 build/rm.py | 5 +-
 build/rust/BUILD.gn | 84 +
 build/rust/OWNERS | 7 +
 build/rust/analyze.gni | 79 +
 build/rust/cargo_crate.gni | 340 +++
 build/rust/collect_rust_sources.py | 64 +
 build/rust/filter_clang_args.py | 31 +
 build/rust/rs_bindings_from_cc.gni | 297 ++
 build/rust/run_bindgen.py | 98 +
 build/rust/run_build_script.py | 164 ++
 build/rust/run_rs_bindings_from_cc.py | 127 +
 build/rust/rust_bindgen.gni | 193 ++
 build/rust/rust_executable.gni | 70 +
 build/rust/rust_macro.gni | 19 +
 build/rust/rust_shared_library.gni | 26 +
 build/rust/rust_static_library.gni | 169 ++
 build/rust/rust_target.gni | 448 +++
 build/rust/rust_unit_test.gni | 138 +
 build/rust/rust_unit_tests_group.gni | 93 +
 build/rust/rustc_wrapper.py | 157 +
 build/rust/std/BUILD.gn | 346 +++
 build/rust/std/fake_root/.cargo/config.toml | 5 +
 build/rust/std/fake_root/.gitignore | 2 +
 build/rust/std/fake_root/Cargo.toml | 16 +
 build/rust/std/fake_root/README.md | 2 +
 build/rust/std/fake_root/src/main.rs | 3 +
 build/rust/std/find_std_rlibs.py | 164 ++
 build/rust/std/gnrt_config.toml | 60 +
 build/rust/std/immediate_crash.h | 170 ++
 build/rust/std/remap_alloc.cc | 152 +
 build/rust/std/rules/BUILD.gn | 878 ++++++
 build/rust/tests/BUILD.gn | 97 +
 build/rust/tests/bindgen_test/BUILD.gn | 48 +
 build/rust/tests/bindgen_test/lib.c | 11 +
 build/rust/tests/bindgen_test/lib.h | 45 +
 build/rust/tests/bindgen_test/lib2.h | 10 +
 build/rust/tests/bindgen_test/main.rs | 9 +
 build/rust/tests/bindgen_test/src/lib.rs | 25 +
 build/rust/tests/test_aliased_deps/BUILD.gn | 30 +
 build/rust/tests/test_aliased_deps/lib.rs | 11 +
 build/rust/tests/test_aliased_deps/main.rs | 7 +
 .../rust/tests/test_aliased_deps/real_name.rs | 11 +
 build/rust/tests/test_bin_crate/BUILD.gn | 15 +
 .../rust/tests/test_bin_crate/crate/build.rs | 62 +
 .../tests/test_bin_crate/crate/src/main.rs | 15 +
 .../tests/test_control_flow_guard/BUILD.gn | 14 +
 .../test_control_flow_guard.rs | 43 +
 .../tests/test_cpp_including_rust/BUILD.gn | 23 +
 .../tests/test_cpp_including_rust/main.cc | 11 +
 .../test_cpp_including_rust/unittests.cc | 31 +
 build/rust/tests/test_local_std/BUILD.gn | 23 +
 build/rust/tests/test_local_std/lib.rs | 8 +
 build/rust/tests/test_local_std/main.rs | 7 +
 .../rust/tests/test_proc_macro_crate/BUILD.gn | 12 +
 .../test_proc_macro_crate/crate/src/lib.rs | 10 +
 build/rust/tests/test_rlib_crate/BUILD.gn | 55 +
 .../rust/tests/test_rlib_crate/crate/build.rs | 90 +
 .../tests/test_rlib_crate/crate/src/lib.rs | 56 +
 .../tests/test_rlib_crate/crate/src/main.rs | 7 +
 .../tests/test_rs_bindings_from_cc/BUILD.gn | 58 +
 .../tests/test_rs_bindings_from_cc/main.rs | 32 +
 .../self_contained_target_header1.h | 12 +
 .../self_contained_target_header2.cc | 9 +
 .../self_contained_target_header2.h | 14 +
 .../target_depending_on_another.h | 14 +
 build/rust/tests/test_rust_exe/BUILD.gn | 17 +
 build/rust/tests/test_rust_exe/main.rs | 32 +
 .../BUILD.gn | 25 +
 .../main.rs | 8 +
 .../transitive_lib.rs | 7 +
 .../v1/BUILD.gn | 12 +
 .../v1/src/lib.rs | 7 +
 .../v2/BUILD.gn | 12 +
 .../v2/src/lib.rs | 7 +
 .../tests/test_rust_shared_library/BUILD.gn | 12 +
 .../tests/test_rust_shared_library/src/lib.rs | 41 +
 .../tests/test_rust_static_library/BUILD.gn | 12 +
 .../tests/test_rust_static_library/src/lib.rs | 48 +
 .../BUILD.gn | 15 +
 .../foo.rs | 12 +
 build/rust/tests/test_rust_unittests/BUILD.gn | 11 +
 build/rust/tests/test_rust_unittests/main.rs | 20 +
 .../tests/test_serde_json_lenient/BUILD.gn | 27 +
 .../rust/tests/test_serde_json_lenient/lib.rs | 29 +
 .../test_serde_json_lenient/unittests.cc | 10 +
 .../rust/tests/test_simple_rust_exe/BUILD.gn | 12 +
 build/rust/tests/test_simple_rust_exe/main.rs | 7 +
 build/sanitize-mac-build-log.sed | 2 +-
 build/sanitize-mac-build-log.sh | 2 +-
 build/sanitize-win-build-log.sed | 2 +-
 build/sanitize-win-build-log.sh | 2 +-
 build/sanitizers/OWNERS | 2 -
 build/sanitizers/asan_suppressions.cc | 2 +-
 build/sanitizers/lsan_suppressions.cc | 43 +-
 build/sanitizers/sanitizer_options.cc | 91 +-
 build/sanitizers/tsan_suppressions.cc | 79 +-
 build/shim_headers.gni | 8 +-
 build/skia_gold_common/PRESUBMIT.py | 35 +-
 build/skia_gold_common/__init__.py | 2 +-
 .../output_managerless_skia_gold_session.py | 56 +-
 ..._managerless_skia_gold_session_unittest.py | 51 +-
 build/skia_gold_common/run_pytype.py | 44 +
 .../skia_gold_common/skia_gold_properties.py | 92 +-
 .../skia_gold_properties_unittest.py | 67 +-
 build/skia_gold_common/skia_gold_session.py | 138 +-
 .../skia_gold_session_manager.py | 31 +-
 .../skia_gold_session_manager_unittest.py | 36 +-
 .../skia_gold_session_unittest.py | 678 ++---
 build/skia_gold_common/unittest_utils.py | 39 +-
 build/symlink.gni | 5 +-
 build/symlink.py | 5 +-
 build/timestamp.gni | 2 +-
 build/toolchain/BUILD.gn | 10 +-
 build/toolchain/OWNERS | 5 +-
 build/toolchain/aix/BUILD.gn | 4 +-
 build/toolchain/android/BUILD.gn | 96 +-
 build/toolchain/android/DIR_METADATA | 1 +
 build/toolchain/apple/.style.yapf | 2 +
 build/toolchain/apple/BUILD.gn | 2 +-
 build/toolchain/apple/filter_libtool.py | 3 +-
 build/toolchain/apple/get_tool_mtime.py | 3 +-
 build/toolchain/apple/linker_driver.py | 610 ++--
 build/toolchain/apple/toolchain.gni | 454 ++-
 build/toolchain/cc_wrapper.gni | 6 +-
 .../toolchain/clang_code_coverage_wrapper.py | 12 +-
 build/toolchain/concurrent_links.gni | 23 +-
 build/toolchain/cros/BUILD.gn | 350 ++-
 build/toolchain/cros_toolchain.gni | 12 +-
 build/toolchain/fuchsia/BUILD.gn | 7 +-
 build/toolchain/fuchsia/DIR_METADATA | 8 +-
 build/toolchain/fuchsia/OWNERS | 2 +-
 build/toolchain/gcc_link_wrapper.py | 15 +-
 build/toolchain/gcc_solink_wrapper.py | 77 +-
 build/toolchain/gcc_toolchain.gni | 347 ++-
 build/toolchain/get_concurrent_links.py | 67 +-
 build/toolchain/get_cpu_count.py | 3 +-
 build/toolchain/get_goma_dir.py | 2 +-
 build/toolchain/goma.gni | 7 +-
 build/toolchain/ios/BUILD.gn | 30 +-
 build/toolchain/ios/compile_xcassets.py | 95 +-
 build/toolchain/ios/swiftc.py | 201 +-
 build/toolchain/kythe.gni | 2 +-
 build/toolchain/linux/BUILD.gn | 159 +-
 build/toolchain/linux/unbundle/BUILD.gn | 2 +-
 build/toolchain/linux/unbundle/README.md | 2 +-
 build/toolchain/mac/BUILD.gn | 57 +-
 build/toolchain/nacl/BUILD.gn | 20 +-
 build/toolchain/nacl_toolchain.gni | 16 +-
 build/toolchain/rbe.gni | 64 +-
 build/toolchain/toolchain.gni | 30 +-
 build/toolchain/whole_archive.py | 51 +
 build/toolchain/win/BUILD.gn | 480 +---
 build/toolchain/win/midl.gni | 9 +-
 build/toolchain/win/midl.py | 9 +-
 build/toolchain/win/ml.py | 94 +-
 build/toolchain/win/rc/linux64/rc.sha1 | 2 +-
 build/toolchain/win/rc/mac/rc.sha1 | 2 +-
 build/toolchain/win/rc/rc.py | 5 +-
 build/toolchain/win/rc/upload_rc_binaries.sh | 2 +-
 build/toolchain/win/rc/win/rc.exe.sha1 | 2 +-
 build/toolchain/win/setup_toolchain.py | 45 +-
 build/toolchain/win/tool_wrapper.py | 15 +-
 build/toolchain/win/toolchain.gni | 691 +++++
 build/toolchain/win/win_toolchain_data.gni | 43 +
 build/toolchain/wrapper_utils.py | 8 +-
 build/toolchain/zos/BUILD.gn | 174 ++
 build/tree_truth.sh | 2 +-
 build/update-linux-sandbox.sh | 2 +-
 build/util/BUILD.gn | 26 +-
 build/util/PRESUBMIT.py | 10 +-
 build/util/action_remote.py | 146 +
 build/util/android_chrome_version.py | 201 +-
 build/util/android_chrome_version_test.py | 828 +++++-
 build/util/branding.gni | 2 +-
 build/util/chromium_git_revision.h.in | 8 +
 build/util/generate_wrapper.gni | 67 +-
 build/util/generate_wrapper.py | 62 +-
 build/util/java_action.gni | 2 +-
 build/util/java_action.py | 4 +-
 build/util/lastchange.gni | 2 +-
 build/util/lastchange.py | 47 +-
 build/util/lib/__init__.py | 0
 build/util/lib/common/PRESUBMIT.py | 12 +-
 .../lib/common/chrome_test_server_spawner.py | 194 +-
 .../util/lib/common/perf_result_data_type.py | 2 +-
 .../lib/common/perf_tests_results_helper.py | 3 +-
 build/util/lib/common/unittest_util.py | 6 +-
 build/util/lib/common/unittest_util_test.py | 4 +-
 build/util/lib/common/util.py | 2 +-
 build/util/lib/results/DIR_METADATA | 11 +
 build/util/lib/results/OWNERS | 1 +
 build/util/lib/results/__init__.py | 0
 .../base => util/lib/results}/result_sink.py | 122 +-
 build/util/lib/results/result_sink_test.py | 138 +
 build/util/lib/results/result_types.py | 25 +
 build/util/process_version.gni | 12 +-
 build/util/python2_action.py | 27 -
 build/util/version.gni | 149 -
 build/util/version.py | 31 +-
 build/util/version_test.py | 48 +-
 build/util/webkit_version.h.in | 9 -
 build/vs_toolchain.py | 184 +-
 build/whitespace_file.txt | 23 +-
 build/win/BUILD.gn | 106 +-
 build/win/compatibility.manifest | 3 +
 build/win/copy_cdb_to_output.py | 16 +-
 build/win/gn_meta_sln.py | 3 +-
 build/win/message_compiler.gni | 2 +-
 build/win/message_compiler.py | 3 +-
 build/win/reorder-imports.py | 12 +-
 build/win/set_appcontainer_acls.py | 2 +-
 build/win/use_ansi_codes.py | 5 +-
 build/write_build_date_header.py | 37 -
 build/write_buildflag_header.py | 24 +-
 build/xcode_binaries.yaml | 10 +-
 build/zip_helpers.py | 238 ++
 build/zip_helpers_unittest.py | 58 +
 build_overrides/build.gni | 2 +
 build_overrides/crypto.gni | 17 +
 {build => build_overrides}/util/is_python2.py | 3 +-
 crypto/BUILD.gn | 2 +-
 starboard/build/config/BUILDCONFIG.gn | 2 +-
 .../clang/3.9/platform_configuration/BUILD.gn | 3 +
 third_party/angle/gni/angle.gni | 2 +-
 1176 files changed, 51185 insertions(+), 29493 deletions(-)
 delete mode 100644 build/METADATA
 create mode 100644 build/PRESUBMIT.py
 create mode 100755 build/PRESUBMIT_test.py
 create mode 100644 build/action_helpers.py
 create mode 100755 build/action_helpers_unittest.py
 create mode 100644 build/android/COMMON_METADATA
 create mode 100755 build/android/adb_system_webengine_command_line
 create mode 100644 build/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java
 create mode 100644 build/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java
 create mode 100644 build/android/bytecode/java/org/chromium/bytecode/MethodDescription.java
 create mode 100644 build/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java
 create mode 100644 build/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java
 create mode 100644 build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java
 create mode 100644 build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java
 create mode 100644 build/android/chromium_annotations.flags
 mode change 100644 => 100755 build/android/convert_dex_profile_tests.py
 delete mode 100644 build/android/docs/android_app_bundles.md
 create mode 100644 build/android/docs/java_asserts.md
 delete mode 100755 build/android/emma_coverage_stats.py
 delete mode 100755 build/android/emma_coverage_stats_test.py
 delete mode 100755 build/android/gradle/gn_to_cmake.py
 create mode 100755 build/android/gyp/binary_baseline_profile.py
 create mode 100644 build/android/gyp/binary_baseline_profile.pydeps
 create mode 100755 build/android/gyp/compile_kt.py
 create mode 100644 build/android/gyp/compile_kt.pydeps
 create mode 100755 build/android/gyp/create_stub_manifest.py
 create mode 100755 build/android/gyp/create_test_apk_wrapper_script.py
 rename build/android/gyp/{jetify_jar.pydeps => create_test_apk_wrapper_script.pydeps} (50%)
 create mode 100755 build/android/gyp/create_unwind_table.py
 create mode 100755 build/android/gyp/create_unwind_table_tests.py
 delete mode 100755 build/android/gyp/desugar.py
 delete mode 100755 build/android/gyp/dex_jdk_libs.py
 create mode 100755 build/android/gyp/dex_test.py
 delete mode 100755 build/android/gyp/dexsplitter.py
 create mode 100755 build/android/gyp/flatc_java.py
 rename build/android/gyp/{dex_jdk_libs.pydeps => flatc_java.pydeps} (53%)
 create mode 100755 build/android/gyp/javac_output_processor.py
 delete mode 100755 build/android/gyp/jetify_jar.py
 delete mode 100644 build/android/gyp/native_libraries_template.py
 create mode 100755 build/android/gyp/optimize_resources.py
 rename build/android/gyp/{dexsplitter.pydeps => optimize_resources.pydeps} (52%)
 delete mode 100644 build/android/gyp/resources_shrinker/BUILD.gn
 delete mode 100755 build/android/gyp/resources_shrinker/shrinker.py
 delete mode 100644 build/android/gyp/resources_shrinker/shrinker.pydeps
 create mode 100755 build/android/gyp/system_image_apks.py
 rename build/android/gyp/{desugar.pydeps => system_image_apks.pydeps} (58%)
 create mode 100755 build/android/gyp/trace_event_bytecode_rewriter.py
 create mode 100644 build/android/gyp/trace_event_bytecode_rewriter.pydeps
 create mode 100755 build/android/gyp/unused_resources.py
 create mode 100644 build/android/gyp/unused_resources.pydeps
 delete mode 100644 build/android/gyp/util/zipalign.py
 create mode 100755 build/android/gyp/validate_inputs.py
 create mode 100644 build/android/incremental_install/third_party/AndroidHiddenApiBypass/BUILD.gn
 create mode 100644 build/android/incremental_install/third_party/AndroidHiddenApiBypass/LICENSE
 create mode 100644 build/android/incremental_install/third_party/AndroidHiddenApiBypass/README.chromium
 create mode 100644 build/android/incremental_install/third_party/AndroidHiddenApiBypass/README.md
 create mode 100644 build/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/Helper.java
 create mode 100644 build/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/HiddenApiBypass.java
 create mode 100644 build/android/incremental_install/third_party/AndroidHiddenApiBypass/local_modifications/org/lsposed/hiddenapibypass/library/BuildConfig.java
 create mode 100644 build/android/incremental_install/third_party/AndroidHiddenApiBypass/stub/src/main/java/dalvik/system/VMRuntime.java
 create mode 100644 build/android/java/src/org/chromium/build/annotations/AlwaysInline.java
 create mode 100644 build/android/java/src/org/chromium/build/annotations/CheckDiscard.java
 create mode 100644 build/android/java/src/org/chromium/build/annotations/DoNotClassMerge.java
 create mode 100644 build/android/java/src/org/chromium/build/annotations/DoNotInline.java
 create mode 100644 build/android/java/src/org/chromium/build/annotations/DoNotStripLogs.java
 create mode 100644 build/android/java/src/org/chromium/build/annotations/IdentifierNameString.java
 create mode 100644 build/android/java/src/org/chromium/build/annotations/MainDex.java
 create mode 100644 build/android/java/src/org/chromium/build/annotations/MockedInTests.java
 create mode 100644 build/android/java/src/org/chromium/build/annotations/UsedByReflection.java
 create mode 100644 build/android/java/test/NoSignatureChangeIncrementalJavacTestHelper.template
 create mode 100644 build/android/java/test/NoSignatureChangeIncrementalJavacTestHelper2.java
 create mode 100644 build/android/java/test/missing_symbol/B.java
 create mode 100644 build/android/java/test/missing_symbol/D.template
 create mode 100644 build/android/java/test/missing_symbol/Importer.template
 create mode 100644 build/android/java/test/missing_symbol/ImportsSubB.java
 create mode 100644 build/android/java/test/missing_symbol/c.jar
 create mode 100644 build/android/java/test/missing_symbol/sub/BInMethodSignature.java
 create mode 100644 build/android/java/test/missing_symbol/sub/SubB.java
 create mode 100644 build/android/junit/AndroidManifest_mergetest.xml
 create mode 100644 build/android/junit/res/values/strings.xml
 create mode 100644 build/android/junit/src/org/chromium/build/AndroidAssetsTest.java
 create mode 100644 build/android/junit/src/org/chromium/build/IncrementalJavacTest.java
 mode change 100644 => 100755 build/android/list_class_verification_failures_test.py
 delete mode 100644 build/android/multidex.flags
 create mode 100755 build/android/print_cipd_version.py
 create mode 100644 build/android/pylib/results/presentation/test_results_presentation.pydeps
 delete mode 100755 build/android/pylib/symbols/apk_lib_dump.py
 delete mode 100644 build/android/pylib/symbols/apk_native_libs.py
 delete mode 100644 build/android/pylib/symbols/apk_native_libs_unittest.py
 delete mode 100644 build/android/pylib/symbols/elf_symbolizer.py
 delete mode 100755 build/android/pylib/symbols/elf_symbolizer_unittest.py
 create mode 100644 build/android/pylib/symbols/expensive_line_transformer.py
 delete mode 100644 build/android/pylib/symbols/symbol_utils.py
 delete mode 100644 build/android/pylib/symbols/symbol_utils_unittest.py
 delete mode 100644 build/android/pylib/utils/proguard.py
 delete mode 100755 build/android/pylib/utils/proguard_test.py
 delete mode 100644 build/android/stacktrace/java_deobfuscate.jar
 create mode 100644 build/android/stacktrace/java_deobfuscate_java.jar
 create mode 100644 build/android/test/incremental_javac_gn/BUILD.gn
build/android/test/incremental_javac_gn/BUILD.gn create mode 100755 build/android/test/incremental_javac_gn/incremental_javac_test_android_library.py create mode 100644 build/android/test/missing_symbol_test.gni create mode 100644 build/android/unused_resources/BUILD.gn rename build/android/{gyp/resources_shrinker/Shrinker.java => unused_resources/UnusedResources.java} (88%) create mode 100755 build/chromeos/generate_skylab_deps.py create mode 100755 build/chromeos/generate_skylab_deps_test.py delete mode 100755 build/cipd/clobber_cipd_root.py create mode 100755 build/clobber_unittest.py create mode 100644 build/config/android/DIR_METADATA create mode 100644 build/config/android/create_unwind_table.gni create mode 100644 build/config/android/system_image.gni delete mode 100644 build/config/android/test/classpath_order/BUILD.gn delete mode 100644 build/config/android/test/classpath_order/java/res_template/values/values.xml delete mode 100644 build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java delete mode 100644 build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja2 create mode 100644 build/config/apple/BUILD.gn mode change 100644 => 100755 build/config/apple/sdk_info.py create mode 100644 build/config/chromebox_for_meetings/BUILD.gn create mode 100644 build/config/chromebox_for_meetings/OWNERS create mode 100644 build/config/chromebox_for_meetings/README.md create mode 100644 build/config/chromebox_for_meetings/buildflags.gni create mode 100644 build/config/chromecast/OWNERS create mode 100644 build/config/cronet/OWNERS create mode 100644 build/config/cronet/config.gni delete mode 100644 build/config/crypto.gni create mode 100644 build/config/devtools.gni delete mode 100644 build/config/fuchsia/add_DebugData_service.test-cmx delete mode 100644 build/config/fuchsia/build_cmx_from_fragment.py create mode 100644 build/config/fuchsia/fuchsia_package_metadata.gni delete mode 100644 build/config/fuchsia/gfx_tests.cmx delete mode 100644 build/config/fuchsia/package.gni create mode 100644 build/config/fuchsia/packaged_content_embedder_excluded_dirs.gni delete mode 100644 build/config/fuchsia/rules.gni create mode 100644 build/config/fuchsia/size_optimized_cast_receiver_args.gn create mode 100644 build/config/fuchsia/size_optimized_cast_receiver_args_internal.gn delete mode 100644 build/config/fuchsia/test/access_test_data_dir.test-cmx create mode 100644 build/config/fuchsia/test/archivist.shard.test-cml delete mode 100644 build/config/fuchsia/test/audio_capabilities.test-cmx create mode 100644 build/config/fuchsia/test/audio_output.shard.test-cml create mode 100644 build/config/fuchsia/test/chromium_system_test_facet.shard.test-cml create mode 100644 build/config/fuchsia/test/chromium_test_facet.shard.test-cml create mode 100644 build/config/fuchsia/test/context_provider.shard.test-cml create mode 100644 build/config/fuchsia/test/elf_test_ambient_exec_runner.shard.test-cml create mode 100644 build/config/fuchsia/test/elf_test_runner.shard.test-cml delete mode 100644 build/config/fuchsia/test/font_capabilities.test-cmx create mode 100644 build/config/fuchsia/test/fonts.shard.test-cml create mode 100644 build/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml delete mode 100644 build/config/fuchsia/test/jit_capabilities.test-cmx create mode 100644 build/config/fuchsia/test/logger.shard.test-cml create mode 100644 build/config/fuchsia/test/mark_vmo_executable.shard.test-cml create mode 100644 
build/config/fuchsia/test/minimum.shard.test-cml delete mode 100644 build/config/fuchsia/test/minimum_capabilities.test-cmx create mode 100644 build/config/fuchsia/test/network.shard.test-cml delete mode 100644 build/config/fuchsia/test/network_capabilities.test-cmx create mode 100644 build/config/fuchsia/test/platform_video_codecs.shard.test-cml create mode 100644 build/config/fuchsia/test/present_view.shard.test-cml delete mode 100644 build/config/fuchsia/test/present_view_capabilities.test-cmx delete mode 100644 build/config/fuchsia/test/read_debug_data.test-cmx create mode 100644 build/config/fuchsia/test/sysmem.shard.test-cml create mode 100644 build/config/fuchsia/test/system_test_minimum.shard.test-cml create mode 100644 build/config/fuchsia/test/test_fonts.shard.test-cml delete mode 100644 build/config/fuchsia/test/test_logger_capabilities.test-cmx create mode 100644 build/config/fuchsia/test/test_ui_stack.shard.test-cml delete mode 100644 build/config/fuchsia/test/vulkan_capabilities.test-cmx delete mode 100644 build/config/fuchsia/test/web_engine_required_capabilities.test-cmx create mode 100644 build/config/fuchsia/test/web_instance.shard.test-cml create mode 100644 build/config/ios/bundle_data_from_filelist.gni create mode 100644 build/config/ios/ios_test_runner_xcuitest.gni create mode 100644 build/config/ios/swift_source_set.gni create mode 100644 build/config/nacl/host_toolchain.gni delete mode 100644 build/config/posix/sysroot_ld_path.py create mode 100644 build/config/riscv.gni create mode 100644 build/config/rust.gni create mode 100644 build/config/siso/.gitignore create mode 100644 build/config/siso/OWNERS create mode 100644 build/config/siso/README.md create mode 100644 build/config/siso/clang_linux.star create mode 100755 build/config/siso/configure_siso.py create mode 100644 build/config/siso/linux.star create mode 100644 build/config/siso/mac.star create mode 100644 build/config/siso/main.star create mode 100644 build/config/siso/mojo.star create mode 100644 build/config/siso/nacl_linux.star create mode 100644 build/config/siso/remote_exec_wrapper.star create mode 100644 build/config/siso/simple.star create mode 100644 build/config/siso/windows.star delete mode 100644 build/config/x64.gni create mode 100644 build/config/zos/BUILD.gn create mode 100755 build/del_ninja_deps_cache.py create mode 100644 build/fuchsia/COMMON_METADATA create mode 100644 build/fuchsia/PRESUBMIT.py create mode 100644 build/fuchsia/SECURITY_OWNERS delete mode 100644 build/fuchsia/aemu_target.py delete mode 100644 build/fuchsia/amber_repo.py create mode 100755 build/fuchsia/binary_size_differ.py create mode 100755 build/fuchsia/binary_size_differ_test.py delete mode 100644 build/fuchsia/boot_data.py create mode 100644 build/fuchsia/cipd/BUILD.gn create mode 100644 build/fuchsia/cipd/DIR_METADATA create mode 100644 build/fuchsia/cipd/README.md create mode 100644 build/fuchsia/cipd/version.template delete mode 100644 build/fuchsia/common.py delete mode 100644 build/fuchsia/common_args.py delete mode 100755 build/fuchsia/deploy_to_amber_repo.py delete mode 100644 build/fuchsia/device_target.py delete mode 100644 build/fuchsia/emu_target.py create mode 100644 build/fuchsia/gcs_download.py create mode 100755 build/fuchsia/gcs_download_test.py delete mode 100644 build/fuchsia/generic_x64_target.py delete mode 100644 build/fuchsia/linux.sdk.sha1 create mode 100644 build/fuchsia/linux_internal.sdk.sha1 delete mode 100644 build/fuchsia/mac.sdk.sha1 delete mode 100644 build/fuchsia/net_test_server.py 
delete mode 100644 build/fuchsia/qemu_image.py delete mode 100644 build/fuchsia/qemu_target.py delete mode 100755 build/fuchsia/qemu_target_test.py delete mode 100644 build/fuchsia/remote_cmd.py delete mode 100644 build/fuchsia/run_test_package.py delete mode 100644 build/fuchsia/runner_exceptions.py delete mode 100644 build/fuchsia/runner_logs.py delete mode 100644 build/fuchsia/symbolizer.py delete mode 100644 build/fuchsia/target.py create mode 100644 build/fuchsia/test/.coveragerc create mode 100644 build/fuchsia/test/.style.yapf create mode 100644 build/fuchsia/test/OWNERS create mode 100644 build/fuchsia/test/PRESUBMIT.py create mode 100644 build/fuchsia/test/base_ermine_ctl.py create mode 100755 build/fuchsia/test/base_ermine_ctl_unittests.py create mode 100644 build/fuchsia/test/common.py create mode 100755 build/fuchsia/test/common_unittests.py create mode 100644 build/fuchsia/test/compatible_utils.py create mode 100755 build/fuchsia/test/compatible_utils_unittests.py create mode 100755 build/fuchsia/test/coveragetest.py create mode 100755 build/fuchsia/test/deploy_to_fuchsia.py create mode 100755 build/fuchsia/test/deploy_to_fuchsia_unittests.py create mode 100644 build/fuchsia/test/ermine_ctl.py create mode 100644 build/fuchsia/test/ffx_emulator.py create mode 100755 build/fuchsia/test/ffx_emulator_unittests.py create mode 100644 build/fuchsia/test/ffx_integration.py create mode 100755 build/fuchsia/test/flash_device.py create mode 100755 build/fuchsia/test/flash_device_unittests.py create mode 100644 build/fuchsia/test/lockfile.py create mode 100755 build/fuchsia/test/log_manager.py create mode 100755 build/fuchsia/test/log_manager_unittests.py create mode 100755 build/fuchsia/test/publish_package.py create mode 100755 build/fuchsia/test/publish_package_unittests.py create mode 100644 build/fuchsia/test/pylintrc create mode 100644 build/fuchsia/test/run_blink_test.py create mode 100755 build/fuchsia/test/run_executable_test.py create mode 100755 build/fuchsia/test/run_pytype.py create mode 100644 build/fuchsia/test/run_telemetry_test.py create mode 100755 build/fuchsia/test/run_test.py create mode 100644 build/fuchsia/test/run_webpage_test.py create mode 100755 build/fuchsia/test/serve_repo.py create mode 100755 build/fuchsia/test/serve_repo_unittests.py create mode 100755 build/fuchsia/test/start_emulator.py create mode 100644 build/fuchsia/test/test_runner.py create mode 100644 build/fuchsia/test/test_server.py create mode 100755 build/fuchsia/test/test_server_unittests.py delete mode 100755 build/fuchsia/test_runner.py create mode 100755 build/fuchsia/update_images_test.py create mode 100755 build/fuchsia/update_product_bundles.py create mode 100755 build/fuchsia/update_product_bundles_test.py create mode 100755 build/fuchsia/update_sdk_test.py mode change 100644 => 100755 build/gn_helpers_unittest.py delete mode 100755 build/install-build-deps-android.sh create mode 100644 build/ios/PRESUBMIT.py create mode 100644 build/ios/extension_bundle_data.gni create mode 100644 build/ios/presubmit_support.py create mode 100755 build/ios/presubmit_support_test.py create mode 100644 build/ios/test_data/bar.html create mode 100644 build/ios/test_data/basic.filelist create mode 100644 build/ios/test_data/basic.globlist create mode 100644 build/ios/test_data/comment.filelist create mode 100644 build/ios/test_data/comment.globlist create mode 100644 build/ios/test_data/different_local_path.filelist create mode 100644 build/ios/test_data/different_local_path.globlist create mode 100644 
build/ios/test_data/duplicates.filelist create mode 100644 build/ios/test_data/duplicates.globlist create mode 100644 build/ios/test_data/exclusions.filelist create mode 100644 build/ios/test_data/exclusions.globlist create mode 100644 build/ios/test_data/extra.filelist create mode 100644 build/ios/test_data/extra.globlist create mode 100644 build/ios/test_data/foo.css create mode 100644 build/ios/test_data/ignore_outside_globlist_dir.filelist create mode 100644 build/ios/test_data/ignore_outside_globlist_dir.globlist create mode 100644 build/ios/test_data/missing.filelist create mode 100644 build/ios/test_data/missing.globlist create mode 100644 build/ios/test_data/outside_globlist_dir.filelist create mode 100644 build/ios/test_data/outside_globlist_dir.globlist create mode 100644 build/ios/test_data/reorder.filelist create mode 100644 build/ios/test_data/reorder.globlist create mode 100644 build/ios/test_data/repository_relative.filelist create mode 100644 build/ios/test_data/repository_relative.globlist create mode 100644 build/ios/test_data/subdirectory/baz.txt create mode 100755 build/ios/update_bundle_filelist.py create mode 100644 build/lacros/README.md delete mode 100755 build/linux/sysroot_ld_path.sh delete mode 100755 build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py delete mode 100755 build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py rename build/linux/sysroot_scripts/{generate_debian_archive_unstable_gpg.sh => generate_keyring.sh} (71%) create mode 100644 build/linux/sysroot_scripts/generated_package_lists/bullseye.amd64 create mode 100644 build/linux/sysroot_scripts/generated_package_lists/bullseye.arm create mode 100644 build/linux/sysroot_scripts/generated_package_lists/bullseye.arm64 create mode 100644 build/linux/sysroot_scripts/generated_package_lists/bullseye.armel create mode 100644 build/linux/sysroot_scripts/generated_package_lists/bullseye.i386 create mode 100644 build/linux/sysroot_scripts/generated_package_lists/bullseye.mips64el create mode 100644 build/linux/sysroot_scripts/generated_package_lists/bullseye.mipsel delete mode 100644 build/linux/sysroot_scripts/generated_package_lists/sid.amd64 delete mode 100644 build/linux/sysroot_scripts/generated_package_lists/sid.arm delete mode 100644 build/linux/sysroot_scripts/generated_package_lists/sid.arm64 delete mode 100644 build/linux/sysroot_scripts/generated_package_lists/sid.armel delete mode 100644 build/linux/sysroot_scripts/generated_package_lists/sid.i386 delete mode 100644 build/linux/sysroot_scripts/generated_package_lists/sid.mips64el delete mode 100644 build/linux/sysroot_scripts/generated_package_lists/sid.mipsel rename build/linux/sysroot_scripts/{debian_archive_unstable.gpg => keyring.gpg} (86%) delete mode 100644 build/linux/sysroot_scripts/libdbus-1-3-symbols delete mode 100644 build/linux/sysroot_scripts/libxkbcommon0-symbols create mode 100755 build/linux/sysroot_scripts/reversion_glibc.py rename build/linux/sysroot_scripts/{sysroot-creator-sid.sh => sysroot-creator-bullseye.sh} (73%) create mode 100644 build/linux/unbundle/absl_algorithm.gn create mode 100644 build/linux/unbundle/absl_base.gn create mode 100644 build/linux/unbundle/absl_cleanup.gn create mode 100644 build/linux/unbundle/absl_container.gn create mode 100644 build/linux/unbundle/absl_debugging.gn create mode 100644 build/linux/unbundle/absl_flags.gn create mode 100644 build/linux/unbundle/absl_functional.gn create mode 100644 build/linux/unbundle/absl_hash.gn create mode 100644 
build/linux/unbundle/absl_log.gn create mode 100644 build/linux/unbundle/absl_log_internal.gn create mode 100644 build/linux/unbundle/absl_memory.gn create mode 100644 build/linux/unbundle/absl_meta.gn create mode 100644 build/linux/unbundle/absl_numeric.gn create mode 100644 build/linux/unbundle/absl_random.gn create mode 100644 build/linux/unbundle/absl_status.gn create mode 100644 build/linux/unbundle/absl_strings.gn create mode 100644 build/linux/unbundle/absl_synchronization.gn create mode 100644 build/linux/unbundle/absl_time.gn create mode 100644 build/linux/unbundle/absl_types.gn create mode 100644 build/linux/unbundle/absl_utility.gn create mode 100644 build/linux/unbundle/brotli.gn create mode 100644 build/linux/unbundle/crc32c.gn create mode 100644 build/linux/unbundle/dav1d.gn create mode 100644 build/linux/unbundle/double-conversion.gn create mode 100644 build/linux/unbundle/jsoncpp.gn create mode 100644 build/linux/unbundle/libXNVCtrl.gn create mode 100644 build/linux/unbundle/libaom.gn create mode 100644 build/linux/unbundle/libavif.gn create mode 100644 build/linux/unbundle/libyuv.gn create mode 100644 build/linux/unbundle/swiftshader-SPIRV-Headers.gn create mode 100644 build/linux/unbundle/swiftshader-SPIRV-Tools.gn create mode 100644 build/linux/unbundle/vulkan-SPIRV-Headers.gn create mode 100644 build/linux/unbundle/vulkan-SPIRV-Tools.gn create mode 100644 build/linux/unbundle/woff2.gn rename build/{lacros => }/metadata.json.in (100%) create mode 100644 build/private_code_test/BUILD.gn create mode 100644 build/private_code_test/README.md create mode 100755 build/private_code_test/list_gclient_deps.py create mode 100644 build/private_code_test/private_code_test.gni create mode 100755 build/private_code_test/private_code_test.py create mode 100644 build/rust/BUILD.gn create mode 100644 build/rust/OWNERS create mode 100644 build/rust/analyze.gni create mode 100644 build/rust/cargo_crate.gni create mode 100755 build/rust/collect_rust_sources.py create mode 100644 build/rust/filter_clang_args.py create mode 100644 build/rust/rs_bindings_from_cc.gni create mode 100755 build/rust/run_bindgen.py create mode 100755 build/rust/run_build_script.py create mode 100755 build/rust/run_rs_bindings_from_cc.py create mode 100644 build/rust/rust_bindgen.gni create mode 100644 build/rust/rust_executable.gni create mode 100644 build/rust/rust_macro.gni create mode 100644 build/rust/rust_shared_library.gni create mode 100644 build/rust/rust_static_library.gni create mode 100644 build/rust/rust_target.gni create mode 100644 build/rust/rust_unit_test.gni create mode 100644 build/rust/rust_unit_tests_group.gni create mode 100755 build/rust/rustc_wrapper.py create mode 100644 build/rust/std/BUILD.gn create mode 100644 build/rust/std/fake_root/.cargo/config.toml create mode 100644 build/rust/std/fake_root/.gitignore create mode 100644 build/rust/std/fake_root/Cargo.toml create mode 100644 build/rust/std/fake_root/README.md create mode 100644 build/rust/std/fake_root/src/main.rs create mode 100755 build/rust/std/find_std_rlibs.py create mode 100644 build/rust/std/gnrt_config.toml create mode 100644 build/rust/std/immediate_crash.h create mode 100644 build/rust/std/remap_alloc.cc create mode 100644 build/rust/std/rules/BUILD.gn create mode 100644 build/rust/tests/BUILD.gn create mode 100644 build/rust/tests/bindgen_test/BUILD.gn create mode 100644 build/rust/tests/bindgen_test/lib.c create mode 100644 build/rust/tests/bindgen_test/lib.h create mode 100644 build/rust/tests/bindgen_test/lib2.h create 
mode 100644 build/rust/tests/bindgen_test/main.rs create mode 100644 build/rust/tests/bindgen_test/src/lib.rs create mode 100644 build/rust/tests/test_aliased_deps/BUILD.gn create mode 100644 build/rust/tests/test_aliased_deps/lib.rs create mode 100644 build/rust/tests/test_aliased_deps/main.rs create mode 100644 build/rust/tests/test_aliased_deps/real_name.rs create mode 100644 build/rust/tests/test_bin_crate/BUILD.gn create mode 100644 build/rust/tests/test_bin_crate/crate/build.rs create mode 100644 build/rust/tests/test_bin_crate/crate/src/main.rs create mode 100644 build/rust/tests/test_control_flow_guard/BUILD.gn create mode 100644 build/rust/tests/test_control_flow_guard/test_control_flow_guard.rs create mode 100644 build/rust/tests/test_cpp_including_rust/BUILD.gn create mode 100644 build/rust/tests/test_cpp_including_rust/main.cc create mode 100644 build/rust/tests/test_cpp_including_rust/unittests.cc create mode 100644 build/rust/tests/test_local_std/BUILD.gn create mode 100644 build/rust/tests/test_local_std/lib.rs create mode 100644 build/rust/tests/test_local_std/main.rs create mode 100644 build/rust/tests/test_proc_macro_crate/BUILD.gn create mode 100644 build/rust/tests/test_proc_macro_crate/crate/src/lib.rs create mode 100644 build/rust/tests/test_rlib_crate/BUILD.gn create mode 100644 build/rust/tests/test_rlib_crate/crate/build.rs create mode 100644 build/rust/tests/test_rlib_crate/crate/src/lib.rs create mode 100644 build/rust/tests/test_rlib_crate/crate/src/main.rs create mode 100644 build/rust/tests/test_rs_bindings_from_cc/BUILD.gn create mode 100644 build/rust/tests/test_rs_bindings_from_cc/main.rs create mode 100644 build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header1.h create mode 100644 build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.cc create mode 100644 build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h create mode 100644 build/rust/tests/test_rs_bindings_from_cc/target_depending_on_another.h create mode 100644 build/rust/tests/test_rust_exe/BUILD.gn create mode 100644 build/rust/tests/test_rust_exe/main.rs create mode 100644 build/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn create mode 100644 build/rust/tests/test_rust_multiple_dep_versions_exe/main.rs create mode 100644 build/rust/tests/test_rust_multiple_dep_versions_exe/transitive_lib.rs create mode 100644 build/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn create mode 100644 build/rust/tests/test_rust_multiple_dep_versions_exe/v1/src/lib.rs create mode 100644 build/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn create mode 100644 build/rust/tests/test_rust_multiple_dep_versions_exe/v2/src/lib.rs create mode 100644 build/rust/tests/test_rust_shared_library/BUILD.gn create mode 100644 build/rust/tests/test_rust_shared_library/src/lib.rs create mode 100644 build/rust/tests/test_rust_static_library/BUILD.gn create mode 100644 build/rust/tests/test_rust_static_library/src/lib.rs create mode 100644 build/rust/tests/test_rust_static_library_non_standard_arrangement/BUILD.gn create mode 100644 build/rust/tests/test_rust_static_library_non_standard_arrangement/foo.rs create mode 100644 build/rust/tests/test_rust_unittests/BUILD.gn create mode 100644 build/rust/tests/test_rust_unittests/main.rs create mode 100644 build/rust/tests/test_serde_json_lenient/BUILD.gn create mode 100644 build/rust/tests/test_serde_json_lenient/lib.rs create mode 100644 build/rust/tests/test_serde_json_lenient/unittests.cc create mode 
100644 build/rust/tests/test_simple_rust_exe/BUILD.gn create mode 100644 build/rust/tests/test_simple_rust_exe/main.rs create mode 100755 build/skia_gold_common/run_pytype.py create mode 100644 build/toolchain/android/DIR_METADATA create mode 100644 build/toolchain/apple/.style.yapf create mode 100644 build/toolchain/whole_archive.py create mode 100644 build/toolchain/win/toolchain.gni create mode 100644 build/toolchain/win/win_toolchain_data.gni create mode 100644 build/toolchain/zos/BUILD.gn create mode 100755 build/util/action_remote.py mode change 100644 => 100755 build/util/android_chrome_version.py create mode 100644 build/util/chromium_git_revision.h.in create mode 100644 build/util/lib/__init__.py create mode 100644 build/util/lib/results/DIR_METADATA create mode 100644 build/util/lib/results/OWNERS create mode 100644 build/util/lib/results/__init__.py rename build/{android/pylib/base => util/lib/results}/result_sink.py (50%) create mode 100755 build/util/lib/results/result_sink_test.py create mode 100644 build/util/lib/results/result_types.py delete mode 100644 build/util/python2_action.py delete mode 100644 build/util/version.gni delete mode 100644 build/util/webkit_version.h.in delete mode 100755 build/write_build_date_header.py create mode 100644 build/zip_helpers.py create mode 100755 build/zip_helpers_unittest.py create mode 100644 build_overrides/crypto.gni rename {build => build_overrides}/util/is_python2.py (77%) diff --git a/build/.gitignore b/build/.gitignore index 2e963395b77b..22046984f517 100644 --- a/build/.gitignore +++ b/build/.gitignore @@ -5,6 +5,7 @@ ciopfs /android/bin /android/binary_size/apks/**/*.apk /args/chromeos/*.gni +/args/chromeos/rewrapper* /config/gclient_args.gni /cros_cache/ /Debug diff --git a/build/BUILD.gn b/build/BUILD.gn index 51ef9b08758e..663413278785 100644 --- a/build/BUILD.gn +++ b/build/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -7,6 +7,9 @@ import("//build/config/chrome_build.gni") import("//build/config/chromecast_build.gni") import("//build/config/chromeos/args.gni") import("//build/config/chromeos/ui_mode.gni") +import("//build/config/features.gni") +import("//build/util/process_version.gni") +import("//build_overrides/build.gni") source_set("buildflag_header_h") { sources = [ "buildflag.h" ] @@ -28,10 +31,20 @@ buildflag_header("branding_buildflags") { } } +buildflag_header("blink_buildflags") { + header = "blink_buildflags.h" + flags = [ "USE_BLINK=$use_blink" ] +} + buildflag_header("chromecast_buildflags") { header = "chromecast_buildflags.h" - flags = [ "IS_CHROMECAST=$is_chromecast" ] + flags = [ + "IS_CHROMECAST=$is_chromecast", + "IS_CASTOS=$is_castos", + "IS_CAST_ANDROID=$is_cast_android", + "ENABLE_CAST_RECEIVER=$enable_cast_receiver", + ] } buildflag_header("chromeos_buildflags") { @@ -42,5 +55,27 @@ buildflag_header("chromeos_buildflags") { "IS_CHROMEOS_LACROS=$is_chromeos_lacros", "IS_CHROMEOS_ASH=$is_chromeos_ash", + "IS_CHROMEOS_WITH_HW_DETAILS=$is_chromeos_with_hw_details", + "IS_REVEN=$is_reven", ] } + +if (build_with_chromium) { + group("gold_common_pytype") { + testonly = true + + data = [ "//build/skia_gold_common/" ] + + data_deps = [ "//testing:pytype_dependencies" ] + } +} + +if (is_chromeos) { + process_version("version_metadata") { + sources = [ "//chrome/VERSION" ] + + template_file = "metadata.json.in" + output = "$root_out_dir/metadata.json" + process_only = true + } +} diff --git a/build/METADATA b/build/METADATA deleted file mode 100644 index ef1b5041d990..000000000000 --- a/build/METADATA +++ /dev/null @@ -1,20 +0,0 @@ -name: "build" -description: - "Subtree at build." - -third_party { - url { - type: LOCAL_SOURCE - value: "/build_mirror" - } - url { - type: GIT - value: "https://chromium.googlesource.com/chromium/src" - } - version: "92.0.4515.157" - last_upgrade_date { - year: 2021 - month: 5 - day: 4 - } -} diff --git a/build/OWNERS b/build/OWNERS index 405ccdcf5a21..dce9d5593800 100644 --- a/build/OWNERS +++ b/build/OWNERS @@ -1,26 +1,34 @@ set noparent # NOTE: keep this in sync with lsc-owners-override@chromium.org owners +# by emailing lsc-policy@chromium.org when this list changes. agrieve@chromium.org brucedawson@chromium.org dpranke@google.com jochen@chromium.org +sdefresne@chromium.org thakis@chromium.org thomasanderson@chromium.org tikuta@chromium.org # Clang build config changes: -hans@chromium.org +file://tools/clang/scripts/OWNERS # For java build changes: +smaier@chromium.org wnwen@chromium.org # NOTE: keep this in sync with lsc-owners-override@chromium.org owners +# by emailing lsc-policy@chromium.org when this list changes. -per-file .gitignore=* -per-file check_gn_headers_whitelist.txt=* +# Mac build changes: per-file mac_toolchain.py=erikchen@chromium.org per-file mac_toolchain.py=justincohen@chromium.org +per-file mac_toolchain.py=file://build/mac/OWNERS +per-file xcode_binaries.yaml=file://build/mac/OWNERS + +per-file .gitignore=* +per-file check_gn_headers_whitelist.txt=* per-file whitespace_file.txt=* per-file OWNERS.status=* per-file OWNERS.setnoparent=set noparent -per-file OWNERS.setnoparent=file://ENG_REVIEW_OWNERS +per-file OWNERS.setnoparent=file://ATL_OWNERS diff --git a/build/OWNERS.setnoparent b/build/OWNERS.setnoparent index 0d473a4eacd9..52755b51367c 100644 --- a/build/OWNERS.setnoparent +++ b/build/OWNERS.setnoparent @@ -2,15 +2,15 @@ # docs/code_reviews.md#owners-file-details for more details. # Overall project governance. 
-file://ENG_REVIEW_OWNERS +file://ATL_OWNERS # Third-party dependency review, see //docs/adding_to_third_party.md file://third_party/OWNERS # Security reviews +file://build/fuchsia/SECURITY_OWNERS file://chromeos/SECURITY_OWNERS -file://content/browser/SITE_ISOLATION_OWNERS -file://fuchsia/SECURITY_OWNERS +file://content/browser/CHILD_PROCESS_SECURITY_POLICY_OWNERS file://ipc/SECURITY_OWNERS file://net/base/SECURITY_OWNERS file://sandbox/linux/OWNERS @@ -28,6 +28,9 @@ file://base/metrics/OWNERS # expose to the open web. file://third_party/blink/API_OWNERS +# third_party/blink/web_tests/VirtualTestSuites need special care. +file://third_party/blink/web_tests/VIRTUAL_OWNERS + # Extension related files. file://chrome/browser/extensions/component_extensions_allowlist/EXTENSION_ALLOWLIST_OWNERS file://extensions/common/api/API_OWNERS @@ -42,7 +45,7 @@ file://ui/android/java/res/LAYOUT_OWNERS # Chrome and Chrome OS). # The rules are documented at: # https://sites.google.com/a/chromium.org/dev/developers/how-tos/enterprise/adding-new-policies -file://components/policy/resources/ENTERPRISE_POLICY_OWNERS +file://components/policy/ENTERPRISE_POLICY_OWNERS # This restriction is in place due to the complicated compliance regulations # around this code. @@ -60,3 +63,23 @@ file://weblayer/API_OWNERS # New features for lock/login UI on Chrome OS need to work stably in all corner # cases. file://ash/login/LOGIN_LOCK_OWNERS + +# Changes to the CQ/CI configuration can have a significant impact on infra cost +# and performance. Approval should be limited to a small subset of the users +# that can make infra changes. +file://infra/config/groups/cq-usage/CQ_USAGE_OWNERS +file://infra/config/groups/sheriff-rotations/CHROMIUM_OWNERS + +# Origin Trials owners are responsible for determining trials that need to be +# completed manually. +file://third_party/blink/common/origin_trials/OT_OWNERS + +# New notifiers added to //ash/constants/notifier_catalogs.h and +# //ash/constants/quick_settings_catalogs.h should be reviewed +# by //ash/system owners to ensure that the correct notifier is being used. +file://ash/system/OWNERS + +# WebUI surfaces are user visible and frequently are kept around indefinitely. +# New WebUI additions should be reviewed by WebUI PLATFORM_OWNERS to ensure +# they follow the guidance at https://www.chromium.org/developers/webui +file://ui/webui/PLATFORM_OWNERS diff --git a/build/PRESUBMIT.py b/build/PRESUBMIT.py new file mode 100644 index 000000000000..fba4d3288767 --- /dev/null +++ b/build/PRESUBMIT.py @@ -0,0 +1,57 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +PRESUBMIT_VERSION = '2.0.0' + +# This line is 'magic' in that git-cl looks for it to decide whether to +# use Python3 instead of Python2 when running the code in this file. +USE_PYTHON3 = True + +import textwrap + + +def CheckNoBadDeps(input_api, output_api): + """Prevent additions of bad dependencies from the //build prefix.""" + build_file_patterns = [ + r'(.+/)?BUILD\.gn', + r'.+\.gni', + ] + blocklist_pattern = input_api.re.compile(r'^[^#]*"//(?!build).+?/.*"') + allowlist_pattern = input_api.re.compile(r'^[^#]*"//third_party/junit') + + warning_message = textwrap.dedent(""" + The //build directory is meant to be as hermetic as possible so that + other projects (webrtc, v8, angle) can make use of it. If you are adding + a new dep from //build onto another directory, you should consider: + 1) Can that dep live within //build? 
+ 2) Can the dep be guarded by "build_with_chromium"? + 3) Have you made this new dep easy to pull in for other projects (ideally + a matter of adding a DEPS entry).:""") + + def FilterFile(affected_file): + return input_api.FilterSourceFile(affected_file, + files_to_check=build_file_patterns) + + problems = [] + for f in input_api.AffectedSourceFiles(FilterFile): + local_path = f.LocalPath() + for line_number, line in f.ChangedContents(): + if blocklist_pattern.search(line) and not allowlist_pattern.search(line): + problems.append('%s:%d\n %s' % + (local_path, line_number, line.strip())) + if problems: + return [output_api.PresubmitPromptOrNotify(warning_message, problems)] + else: + return [] + + +def CheckPythonTests(input_api, output_api): + return input_api.RunTests( + input_api.canned_checks.GetUnitTestsInDirectory( + input_api, + output_api, + input_api.PresubmitLocalPath(), + files_to_check=[r'.+_(?:unit)?test\.py$'], + run_on_python2=False, + run_on_python3=True)) diff --git a/build/PRESUBMIT_test.py b/build/PRESUBMIT_test.py new file mode 100755 index 000000000000..c5065f4f1136 --- /dev/null +++ b/build/PRESUBMIT_test.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import sys +import unittest + +import PRESUBMIT + +sys.path.append(os.path.join(os.path.dirname(__file__), '..')) + +from PRESUBMIT_test_mocks import MockAffectedFile +from PRESUBMIT_test_mocks import MockInputApi, MockOutputApi + +USE_PYTHON3 = True + + +def _fails_deps_check(line, filename='BUILD.gn'): + mock_input_api = MockInputApi() + mock_input_api.files = [MockAffectedFile(filename, [line])] + errors = PRESUBMIT.CheckNoBadDeps(mock_input_api, MockOutputApi()) + return bool(errors) + + +class CheckNoBadDepsTest(unittest.TestCase): + def testComments(self): + self.assertFalse(_fails_deps_check('no # import("//third_party/foo")')) + + def testFiles(self): + self.assertFalse( + _fails_deps_check('import("//third_party/foo")', filename='foo.txt')) + self.assertTrue( + _fails_deps_check('import("//third_party/foo")', filename='foo.gni')) + + def testPaths(self): + self.assertFalse(_fails_deps_check('import("//build/things.gni")')) + self.assertTrue(_fails_deps_check('import("//chrome/things.gni")')) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/action_helpers.py b/build/action_helpers.py new file mode 100644 index 000000000000..046a292baf6e --- /dev/null +++ b/build/action_helpers.py @@ -0,0 +1,126 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Helper functions useful when writing scripts used by action() targets.""" + +import contextlib +import filecmp +import os +import pathlib +import posixpath +import shutil +import tempfile + +import gn_helpers + + +@contextlib.contextmanager +def atomic_output(path, mode='w+b', only_if_changed=True): + """Prevent half-written files and dirty mtimes for unchanged files. + + Args: + path: Path to the final output file, which will be written atomically. + mode: The mode to open the file in (str). + only_if_changed: Whether to maintain the mtime if the file has not changed. + Returns: + A Context Manager that yields a NamedTemporaryFile instance. On exit, the + manager will check if the file contents is different from the destination + and if so, move it into place. 
+ + Example: + with action_helpers.atomic_output(output_path) as tmp_file: + subprocess.check_call(['prog', '--output', tmp_file.name]) + """ + # Create in same directory to ensure same filesystem when moving. + dirname = os.path.dirname(path) or '.' + os.makedirs(dirname, exist_ok=True) + with tempfile.NamedTemporaryFile(mode, + suffix=os.path.basename(path), + dir=dirname, + delete=False) as f: + try: + yield f + + # File should be closed before comparison/move. + f.close() + if not (only_if_changed and os.path.exists(path) + and filecmp.cmp(f.name, path)): + shutil.move(f.name, path) + finally: + f.close() + if os.path.exists(f.name): + os.unlink(f.name) + + +def add_depfile_arg(parser): + if hasattr(parser, 'add_option'): + func = parser.add_option + else: + func = parser.add_argument + func('--depfile', help='Path to depfile (refer to "gn help depfile")') + + +def write_depfile(depfile_path, first_gn_output, inputs=None): + """Writes a ninja depfile. + + See notes about how to use depfiles in //build/docs/writing_gn_templates.md. + + Args: + depfile_path: Path to file to write. + first_gn_output: Path of first entry in action's outputs. + inputs: List of inputs to add to depfile. + """ + assert depfile_path != first_gn_output # http://crbug.com/646165 + assert not isinstance(inputs, str) # Easy mistake to make + + def _process_path(path): + assert not os.path.isabs(path), f'Found abs path in depfile: {path}' + if os.path.sep != posixpath.sep: + path = str(pathlib.Path(path).as_posix()) + assert '\\' not in path, f'Found \\ in depfile: {path}' + return path.replace(' ', '\\ ') + + sb = [] + sb.append(_process_path(first_gn_output)) + if inputs: + # Sort and uniquify to ensure file is hermetic. + # One path per line to keep it human readable. + sb.append(': \\\n ') + sb.append(' \\\n '.join(sorted(_process_path(p) for p in set(inputs)))) + else: + sb.append(': ') + sb.append('\n') + + path = pathlib.Path(depfile_path) + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(''.join(sb)) + + +def parse_gn_list(value): + """Converts a "GN-list" command-line parameter into a list. + + Conversions handled: + * None -> [] + * '' -> [] + * 'asdf' -> ['asdf'] + * '["a", "b"]' -> ['a', 'b'] + * ['["a", "b"]', 'c'] -> ['a', 'b', 'c'] (action='append') + + This allows passing args like: + gn_list = [ "one", "two", "three" ] + args = [ "--items=$gn_list" ] + """ + # Convert None to []. + if not value: + return [] + # Convert a list of GN lists to a flattened list. + if isinstance(value, list): + ret = [] + for arg in value: + ret.extend(parse_gn_list(arg)) + return ret + # Convert normal GN list. + if value.startswith('['): + return gn_helpers.GNValueParser(value).ParseList() + # Convert a single string value to a list. + return [value] diff --git a/build/action_helpers_unittest.py b/build/action_helpers_unittest.py new file mode 100755 index 000000000000..6a9f90851bf0 --- /dev/null +++ b/build/action_helpers_unittest.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import pathlib +import shutil +import sys +import tempfile +import time +import unittest + +import action_helpers + + +class ActionHelpersTest(unittest.TestCase): + def test_atomic_output(self): + tmp_file = pathlib.Path(tempfile.mktemp()) + tmp_file.write_text('test') + try: + # Test that same contents does not change mtime. 
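[Editorial sketch, not part of the patch: the action_helpers module added above is self-contained, and an action() script might wire its pieces together roughly as follows. The --inputs/--output flags and the file-listing behavior are invented for illustration.]

#!/usr/bin/env python3
# Illustrative only: exercises atomic_output(), parse_gn_list() and
# write_depfile() from the action_helpers module introduced above.
import argparse

import action_helpers


def main():
  parser = argparse.ArgumentParser()
  action_helpers.add_depfile_arg(parser)
  parser.add_argument('--inputs', action='append', help='GN-list of inputs.')
  parser.add_argument('--output', required=True)
  args = parser.parse_args()

  # '["a", "b"]'-style GN values (possibly repeated) flatten into one list.
  inputs = action_helpers.parse_gn_list(args.inputs)

  # The temp file replaces args.output only when the contents differ, so
  # ninja sees a fresh mtime only when dependents actually need a rebuild.
  with action_helpers.atomic_output(args.output, mode='wt') as f:
    f.write('\n'.join(inputs))

  # Paths must be relative to the build dir, and the depfile must not be
  # listed as its own first output.
  if args.depfile:
    action_helpers.write_depfile(args.depfile, args.output, inputs)


if __name__ == '__main__':
  main()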
+ orig_mtime = os.path.getmtime(tmp_file) + with action_helpers.atomic_output(str(tmp_file), 'wt') as af: + time.sleep(.01) + af.write('test') + + self.assertEqual(os.path.getmtime(tmp_file), orig_mtime) + + # Test that contents is written. + with action_helpers.atomic_output(str(tmp_file), 'wt') as af: + af.write('test2') + self.assertEqual(tmp_file.read_text(), 'test2') + self.assertNotEqual(os.path.getmtime(tmp_file), orig_mtime) + finally: + tmp_file.unlink() + + def test_parse_gn_list(self): + def test(value, expected): + self.assertEqual(action_helpers.parse_gn_list(value), expected) + + test(None, []) + test('', []) + test('asdf', ['asdf']) + test('["one"]', ['one']) + test(['["one"]', '["two"]'], ['one', 'two']) + test(['["one", "two"]', '["three"]'], ['one', 'two', 'three']) + + def test_write_depfile(self): + tmp_file = pathlib.Path(tempfile.mktemp()) + try: + + def capture_output(inputs): + action_helpers.write_depfile(str(tmp_file), 'output', inputs) + return tmp_file.read_text() + + self.assertEqual(capture_output(None), 'output: \n') + self.assertEqual(capture_output([]), 'output: \n') + self.assertEqual(capture_output(['a']), 'output: \\\n a\n') + # Check sorted. + self.assertEqual(capture_output(['b', 'a']), 'output: \\\n a \\\n b\n') + # Check converts to forward slashes. + self.assertEqual(capture_output(['a', os.path.join('b', 'c')]), + 'output: \\\n a \\\n b/c\n') + + # Arg should be a list. + with self.assertRaises(AssertionError): + capture_output('a') + + # Do not use depfile itself as an output. + with self.assertRaises(AssertionError): + capture_output([str(tmp_file)]) + + # Do not use absolute paths. + with self.assertRaises(AssertionError): + capture_output([os.path.sep + 'foo']) + + # Do not use absolute paths (output path). + with self.assertRaises(AssertionError): + action_helpers.write_depfile(str(tmp_file), '/output', []) + + finally: + tmp_file.unlink() + + +if __name__ == '__main__': + unittest.main() diff --git a/build/add_rts_filters.py b/build/add_rts_filters.py index 4186c39333ee..94297c550b9b 100755 --- a/build/add_rts_filters.py +++ b/build/add_rts_filters.py @@ -1,11 +1,12 @@ -#!/usr/bin/env python -# Copyright (c) 2021 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -"""Creates a dummy RTS filter file if a real one doesn't exist yes. - Real filter files are generated by the RTS binary for suites with any - skippable tests. The rest of the suites need to have dummy files because gn - will expect the file to be present. +"""Creates a dummy RTS filter file and a dummy inverse filter file if a + real ones do not exist yet. Real filter files (and their inverse) are + generated by the RTS binary for suites with any skippable tests. The + rest of the suites need to have dummy files because gn will expect the + file to be present. Implementation uses try / except because the filter files are written relatively close to when this code creates the dummy files. 
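[Editorial sketch, not code from the patch: the try / except strategy the docstring above describes reduces to the create-if-absent pattern below; the helper name is invented.]

# Sketch: atomically create a dummy filter file unless a real one exists.
import errno
import os


def create_dummy_if_absent(path, contents):
  directory = os.path.dirname(path)
  if directory:
    os.makedirs(directory, exist_ok=True)
  try:
    # O_EXCL makes creation atomic: it fails with EEXIST if a real filter
    # file appeared between the directory check and this write.
    fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
  except OSError as err:
    if err.errno != errno.EEXIST:
      raise
    return  # A real filter file already exists; keep it.
  with os.fdopen(fd, 'w') as f:
    f.write(contents)  # '*' means run everything; '-*' means run nothing.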
@@ -22,6 +23,15 @@ def main(): filter_file = sys.argv[1] + # '*' is a dummy that means run everything + write_filter_file(filter_file, '*') + + inverted_filter_file = sys.argv[2] + # '-*' is a dummy that means run nothing + write_filter_file(inverted_filter_file, '-*') + + +def write_filter_file(filter_file, filter_string): directory = os.path.dirname(filter_file) try: os.makedirs(directory) @@ -30,7 +40,6 @@ def main(): pass else: raise - try: fp = os.open(filter_file, os.O_CREAT | os.O_EXCL | os.O_WRONLY) except OSError as err: @@ -40,7 +49,7 @@ def main(): raise else: with os.fdopen(fp, 'w') as file_obj: - file_obj.write('*') # '*' is a dummy that means run everything + file_obj.write(filter_string) if __name__ == '__main__': diff --git a/build/android/AndroidManifest.xml b/build/android/AndroidManifest.xml index 3c4ed292e282..821108f8a177 100644 --- a/build/android/AndroidManifest.xml +++ b/build/android/AndroidManifest.xml @@ -5,14 +5,8 @@ LICENSE file. --> - - diff --git a/build/android/BUILD.gn b/build/android/BUILD.gn index 1be9f479f20d..4d035b8d3367 100644 --- a/build/android/BUILD.gn +++ b/build/android/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -16,36 +16,90 @@ if (enable_java_templates) { create_cache = true } - if (enable_jdk_library_desugaring) { - dex_jdk_libs("all_jdk_libs") { - output = "$target_out_dir/$target_name.l8.dex" - min_sdk_version = default_min_sdk_version - } - } - generate_build_config_srcjar("build_config_gen") { use_final_fields = false } - java_library("build_config_java") { - supports_android = true - srcjar_deps = [ ":build_config_gen" ] - jar_excluded_patterns = [ "*/build/BuildConfig.class" ] + generate_build_config_srcjar("build_config_for_testing_gen") { + use_final_fields = false + testonly = true } write_native_libraries_java("native_libraries_gen") { use_final_fields = false } - android_library("native_libraries_java") { - srcjar_deps = [ ":native_libraries_gen" ] + java_library("build_java") { + supports_android = true + srcjar_deps = [ + ":build_config_gen", + ":native_libraries_gen", + ] + sources = [ + "java/src/org/chromium/build/annotations/AlwaysInline.java", + "java/src/org/chromium/build/annotations/CheckDiscard.java", + "java/src/org/chromium/build/annotations/DoNotClassMerge.java", + "java/src/org/chromium/build/annotations/DoNotInline.java", + "java/src/org/chromium/build/annotations/DoNotStripLogs.java", + "java/src/org/chromium/build/annotations/IdentifierNameString.java", + "java/src/org/chromium/build/annotations/MainDex.java", + "java/src/org/chromium/build/annotations/MockedInTests.java", + "java/src/org/chromium/build/annotations/UsedByReflection.java", + ] + + jar_excluded_patterns = [ "*/build/BuildConfig.class" ] # New version of NativeLibraries.java (with the actual correct values) will # be created when creating an apk. - jar_excluded_patterns = [ "*/NativeLibraries.class" ] + jar_excluded_patterns += [ "*/NativeLibraries.class" ] + + proguard_configs = [ "chromium_annotations.flags" ] + } + + # Not all //build embedders pull in junit_binary deps that live in //third_party. + if (build_with_chromium) { + android_assets("junit_test_assets") { + testonly = true + + # We just need any file here, so use the test itself. 
+ sources = [ "junit/src/org/chromium/build/AndroidAssetsTest.java" ] + } + android_resources("junit_test_resources") { + testonly = true + sources = [ "junit/res/values/strings.xml" ] + mergeable_android_manifests = [ "junit/AndroidManifest_mergetest.xml" ] + } + robolectric_binary("build_junit_tests") { + # Test has no JNI, so skip JNI Generator step. + generate_final_jni = false + resources_package = "org.chromium.build" + sources = [ + "junit/src/org/chromium/build/AndroidAssetsTest.java", + "junit/src/org/chromium/build/IncrementalJavacTest.java", + ] + deps = [ + ":junit_test_assets", + ":junit_test_resources", + "//build/android/test/incremental_javac_gn:no_signature_change_prebuilt_java", + "//third_party/junit", + ] + } } } +# TODO(go/turn-down-test-results): Remove once we turn down +# test-results.appspot.com +python_library("test_result_presentations_py") { + pydeps_file = "pylib/results/presentation/test_results_presentation.pydeps" + data = [ + "//build/android/pylib/results/presentation/template", + "//build/android/pylib/results/presentation/javascript/main_html.js", + "//third_party/catapult/third_party/gsutil/", + "//third_party/jinja2/debug.py", + "//third_party/six", + ] +} + python_library("devil_chromium_py") { pydeps_file = "devil_chromium.pydeps" data = [ @@ -68,9 +122,7 @@ group("apk_installer_data") { "//build/android/pylib/device/commands", "//tools/android/md5sum", ] - data = [ - "//third_party/android_build_tools/bundletool/bundletool-all-1.4.0.jar", - ] + data = [ "//third_party/android_build_tools/bundletool/bundletool.jar" ] } } @@ -79,24 +131,41 @@ python_library("apk_operations_py") { deps = [ ":apk_installer_data" ] } -python_library("test_runner_py") { +group("test_runner_py") { + testonly = true + deps = [ + ":test_runner_core_py", + ":test_runner_device_support", + ] +} + +python_library("test_runner_core_py") { testonly = true pydeps_file = "test_runner.pydeps" data = [ "pylib/gtest/filter/", "pylib/instrumentation/render_test.html.jinja", "test_wrapper/logdog_wrapper.py", - "${android_sdk_build_tools}/aapt", - "${android_sdk_build_tools}/dexdump", - "${android_sdk_build_tools}/lib64/libc++.so", - "${android_sdk_build_tools}/split-select", - "${android_sdk_root}/platform-tools/adb", "//third_party/requests/", ] + data_deps = [ ":logdog_wrapper_py" ] +} + +group("test_runner_device_support") { + testonly = true + + # We hardcode using these tools from the public sdk in devil_chromium.json and + # in pylib's constants. + data = [ + "${public_android_sdk_build_tools}/aapt", + "${public_android_sdk_build_tools}/dexdump", + "${public_android_sdk_build_tools}/lib64/libc++.so", + "${public_android_sdk_build_tools}/split-select", + "${public_android_sdk_root}/platform-tools/adb", + ] data_deps = [ ":apk_installer_data", ":devil_chromium_py", - ":logdog_wrapper_py", ":stack_tools", ] @@ -104,6 +173,9 @@ python_library("test_runner_py") { if (build_with_chromium) { data_deps += [ "//tools/android/forwarder2" ] data += [ "//tools/android/avd/proto/" ] + if (enable_chrome_android_internal) { + data += [ "//clank/tools/android/avd/proto/" ] + } if (is_asan) { data_deps += [ "//tools/android/asan/third_party:asan_device_setup" ] } @@ -111,7 +183,7 @@ python_library("test_runner_py") { # Proguard is needed only when using apks (rather than native executables). 
if (enable_java_templates) { - deps = [ "//build/android/stacktrace:java_deobfuscate" ] + data_deps += [ "//build/android/stacktrace:java_deobfuscate" ] } } @@ -125,9 +197,11 @@ python_library("resource_sizes_py") { ":devil_chromium_py", "//third_party/catapult/tracing:convert_chart_json", ] + data = [ build_vars_file, android_readelf, + rebase_path("$android_ndk_library_path/libc++.so.1", root_build_dir), ] } diff --git a/build/android/COMMON_METADATA b/build/android/COMMON_METADATA new file mode 100644 index 000000000000..7a2580a646c4 --- /dev/null +++ b/build/android/COMMON_METADATA @@ -0,0 +1 @@ +os: ANDROID diff --git a/build/android/DIR_METADATA b/build/android/DIR_METADATA index 7a2580a646c4..cdc2d6fb6eb6 100644 --- a/build/android/DIR_METADATA +++ b/build/android/DIR_METADATA @@ -1 +1 @@ -os: ANDROID +mixins: "//build/android/COMMON_METADATA" diff --git a/build/android/OWNERS b/build/android/OWNERS index 0b64bda0fff6..94fa76830228 100644 --- a/build/android/OWNERS +++ b/build/android/OWNERS @@ -1,7 +1,6 @@ +agrieve@chromium.org bjoyce@chromium.org -jbudorick@chromium.org mheikal@chromium.org pasko@chromium.org -skyostil@chromium.org -tiborg@chromium.org +smaier@chromium.org wnwen@chromium.org diff --git a/build/android/PRESUBMIT.py b/build/android/PRESUBMIT.py index 2cf0602cfca5..8348558c1fbd 100644 --- a/build/android/PRESUBMIT.py +++ b/build/android/PRESUBMIT.py @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,8 +8,16 @@ details on the presubmit API built into depot_tools. """ +USE_PYTHON3 = True + def CommonChecks(input_api, output_api): + # These tools don't run on Windows so these tests don't work and give many + # verbose and cryptic failure messages. Linting the code is also skipped on + # Windows because it will fail due to os differences. 
+ if input_api.sys.platform == 'win32': + return [] + build_android_dir = input_api.PresubmitLocalPath() def J(*dirs): @@ -29,9 +37,7 @@ def J(*dirs): output_api, pylintrc='pylintrc', files_to_skip=[ - r'.*_pb2\.py', - r'.*list_java_targets\.py', # crbug.com/1100664 - r'.*fast_local_dev_server\.py', # crbug.com/1100664 + r'.*_pb2\.py' ] + build_pys, extra_paths_list=[ J(), @@ -46,7 +52,8 @@ def J(*dirs): J('..', '..', 'third_party', 'depot_tools'), J('..', '..', 'third_party', 'colorama', 'src'), J('..', '..', 'build'), - ])) + ], + version='2.7')) tests.extend( input_api.canned_checks.GetPylint( input_api, @@ -55,13 +62,27 @@ def J(*dirs): files_to_skip=[ r'.*_pb2\.py', r'.*_pb2\.py', + r'.*create_unwind_table\.py', + r'.*create_unwind_table_tests\.py', + ], + extra_paths_list=[J('gyp'), J('gn')], + version='2.7')) + + tests.extend( + input_api.canned_checks.GetPylint( + input_api, + output_api, + files_to_check=[ + r'.*create_unwind_table\.py', + r'.*create_unwind_table_tests\.py', ], - extra_paths_list=[J('gyp'), J('gn')])) + extra_paths_list=[J('gyp'), J('gn')], + version='2.7')) # yapf: enable # Disabled due to http://crbug.com/410936 #output.extend(input_api.canned_checks.RunUnitTestsInDirectory( - #input_api, output_api, J('buildbot', 'tests'))) + #input_api, output_api, J('buildbot', 'tests', skip_shebang_check=True))) pylib_test_env = dict(input_api.environ) pylib_test_env.update({ @@ -73,12 +94,7 @@ def J(*dirs): input_api, output_api, unit_tests=[ - J('.', 'emma_coverage_stats_test.py'), J('.', 'list_class_verification_failures_test.py'), - J('gyp', 'util', 'build_utils_test.py'), - J('gyp', 'util', 'manifest_utils_test.py'), - J('gyp', 'util', 'md5_check_test.py'), - J('gyp', 'util', 'resource_utils_test.py'), J('pylib', 'constants', 'host_paths_unittest.py'), J('pylib', 'gtest', 'gtest_test_instance_test.py'), J('pylib', 'instrumentation', @@ -93,20 +109,22 @@ def J(*dirs): J('pylib', 'output', 'noop_output_manager_test.py'), J('pylib', 'output', 'remote_output_manager_test.py'), J('pylib', 'results', 'json_results_test.py'), - J('pylib', 'symbols', 'apk_native_libs_unittest.py'), - J('pylib', 'symbols', 'elf_symbolizer_unittest.py'), - J('pylib', 'symbols', 'symbol_utils_unittest.py'), J('pylib', 'utils', 'chrome_proxy_utils_test.py'), J('pylib', 'utils', 'decorators_test.py'), J('pylib', 'utils', 'device_dependencies_test.py'), J('pylib', 'utils', 'dexdump_test.py'), J('pylib', 'utils', 'gold_utils_test.py'), - J('pylib', 'utils', 'proguard_test.py'), J('pylib', 'utils', 'test_filter_test.py'), - J('.', 'convert_dex_profile_tests.py'), + J('gyp', 'dex_test.py'), + J('gyp', 'util', 'build_utils_test.py'), + J('gyp', 'util', 'manifest_utils_test.py'), + J('gyp', 'util', 'md5_check_test.py'), + J('gyp', 'util', 'resource_utils_test.py'), ], env=pylib_test_env, - run_on_python2=False)) + run_on_python2=False, + run_on_python3=True, + skip_shebang_check=True)) return input_api.RunTests(tests) diff --git a/build/android/adb_chrome_public_command_line b/build/android/adb_chrome_public_command_line index 86ece8cec761..068493465b14 100755 --- a/build/android/adb_chrome_public_command_line +++ b/build/android/adb_chrome_public_command_line @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
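[Editorial sketch: condensed, the android PRESUBMIT.py change above arrives at roughly the composition below. It reuses only the canned-check arguments visible in the hunks; the unit-test list here is hypothetical.]

# Sketch: pylint pinned to 2.7, unit tests forced onto Python 3 only.
def CommonChecks(input_api, output_api):
  if input_api.sys.platform == 'win32':
    return []  # The wrapped tools do not run on Windows.
  tests = []
  tests.extend(
      input_api.canned_checks.GetPylint(
          input_api,
          output_api,
          files_to_skip=[r'.*_pb2\.py'],
          version='2.7'))  # Pin pylint so results are reproducible.
  tests.extend(
      input_api.canned_checks.RunUnitTests(
          input_api,
          output_api,
          unit_tests=['sample_test.py'],  # Hypothetical test list.
          run_on_python2=False,
          run_on_python3=True,
          skip_shebang_check=True))
  return input_api.RunTests(tests)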
diff --git a/build/android/adb_command_line.py b/build/android/adb_command_line.py index c3ec8d49d03d..8557085d5381 100755 --- a/build/android/adb_command_line.py +++ b/build/android/adb_command_line.py @@ -1,11 +1,10 @@ -#!/usr/bin/env vpython -# Copyright 2015 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Utility for reading / writing command-line flag files on device(s).""" -from __future__ import print_function import argparse import logging @@ -27,7 +26,7 @@ def CheckBuildTypeSupportsFlags(device, command_line_flags_file): raise device_errors.CommandFailedError( 'WebView only respects flags on a userdebug or eng device, yours ' 'is a user build.', device) - elif device.IsUserBuild(): + if device.IsUserBuild(): logging.warning( 'Your device (%s) is a user build; Chrome may or may not pick up ' 'your commandline flags. Check your ' diff --git a/build/android/adb_gdb b/build/android/adb_gdb index 0923210bb69e..885d597032f8 100755 --- a/build/android/adb_gdb +++ b/build/android/adb_gdb @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # @@ -355,8 +355,8 @@ elif [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then fi if [ -z "$NDK_DIR" ]; then - ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python -c \ -'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,') + ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python3 -c \ + 'from pylib.constants import ANDROID_NDK_ROOT; print(ANDROID_NDK_ROOT,)') else if [ ! -d "$NDK_DIR" ]; then panic "Invalid directory: $NDK_DIR" @@ -573,42 +573,6 @@ get_ndk_toolchain_prebuilt () { echo "$FILE" } -# Find the path to an NDK's toolchain full prefix for a given architecture -# $1: NDK install path -# $2: NDK target architecture name -# Out: install path + binary prefix (e.g. -# ".../path/to/bin/arm-linux-androideabi-") -get_ndk_toolchain_fullprefix () { - local NDK_DIR="$1" - local ARCH="$2" - local TARGET NAME HOST_OS HOST_ARCH LD CONFIG - - # NOTE: This will need to be updated if the NDK changes the names or moves - # the location of its prebuilt toolchains. - # - LD= - HOST_OS=$(get_ndk_host_system) - HOST_ARCH=$(get_ndk_host_arch) - CONFIG=$(get_arch_gnu_config $ARCH) - LD=$(get_ndk_toolchain_prebuilt \ - "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-ld") - if [ -z "$LD" -a "$HOST_ARCH" = "x86_64" ]; then - LD=$(get_ndk_toolchain_prebuilt \ - "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-ld") - fi - if [ ! -f "$LD" -a "$ARCH" = "x86" ]; then - # Special case, the x86 toolchain used to be incorrectly - # named i686-android-linux-gcc! - LD=$(get_ndk_toolchain_prebuilt \ - "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-ld") - fi - if [ -z "$LD" ]; then - panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \ -Please verify your NDK installation!" - fi - echo "${LD%%ld}" -} - # $1: NDK install path get_ndk_host_gdb_client() { local NDK_DIR="$1" @@ -634,28 +598,6 @@ get_ndk_gdbserver () { echo "$BINARY" } -# Check/probe the path to the Android toolchain installation. Always -# use the NDK versions of gdb and gdbserver. They must match to avoid -# issues when both binaries do not speak the same wire protocol. 
-# -if [ -z "$TOOLCHAIN" ]; then - ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \ - "$ANDROID_NDK_ROOT" "$TARGET_ARCH") - ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN") - log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN" -else - # Be flexible, allow one to specify either the install path or the bin - # sub-directory in --toolchain: - # - if [ -d "$TOOLCHAIN/bin" ]; then - TOOLCHAIN=$TOOLCHAIN/bin - fi - ANDROID_TOOLCHAIN=$TOOLCHAIN -fi - -# Cosmetic: Remove trailing directory separator. -ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/} - # Find host GDB client binary if [ -z "$GDB" ]; then GDB=$(get_ndk_host_gdb_client "$ANDROID_NDK_ROOT") diff --git a/build/android/adb_install_apk.py b/build/android/adb_install_apk.py index 6ec98e2f8509..7cc6eb047a72 100755 --- a/build/android/adb_install_apk.py +++ b/build/android/adb_install_apk.py @@ -1,6 +1,6 @@ -#!/usr/bin/env vpython +#!/usr/bin/env vpython3 # -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -59,8 +59,11 @@ def main(): parser.add_argument('--adb-path', type=os.path.abspath, help='Absolute path to the adb binary to use.') parser.add_argument('--denylist-file', help='Device denylist JSON file.') - parser.add_argument('-v', '--verbose', action='count', - help='Enable verbose logging.') + parser.add_argument('-v', + '--verbose', + action='count', + help='Enable verbose logging.', + default=0) parser.add_argument('--downgrade', action='store_true', help='If set, allows downgrading of apk.') parser.add_argument('--timeout', type=int, diff --git a/build/android/adb_logcat_monitor.py b/build/android/adb_logcat_monitor.py index a919722cbab7..0b52997a29f1 100755 --- a/build/android/adb_logcat_monitor.py +++ b/build/android/adb_logcat_monitor.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -16,7 +16,6 @@ early enough to not miss anything. 
""" -from __future__ import print_function import logging import os @@ -33,12 +32,10 @@ class TimeoutException(Exception): """Exception used to signal a timeout.""" - pass class SigtermError(Exception): """Exception used to catch a sigterm.""" - pass def StartLogcatIfNecessary(device_id, adb_cmd, base_dir): @@ -48,12 +45,11 @@ def StartLogcatIfNecessary(device_id, adb_cmd, base_dir): if process.poll() is None: # Logcat process is still happily running return - else: - logging.info('Logcat for device %s has died', device_id) - error_filter = re.compile('- waiting for device -') - for line in process.stderr: - if not error_filter.match(line): - logging.error(device_id + ': ' + line) + logging.info('Logcat for device %s has died', device_id) + error_filter = re.compile('- waiting for device -') + for line in process.stderr: + if not error_filter.match(line): + logging.error(device_id + ': ' + line) logging.info('Starting logcat %d for device %s', logcat_num, device_id) @@ -85,7 +81,7 @@ def GetAttachedDevices(adb_cmd): stderr=subprocess.PIPE).communicate() if err: logging.warning('adb device error %s', err.strip()) - return re.findall('^(\\S+)\tdevice$', out, re.MULTILINE) + return re.findall('^(\\S+)\tdevice$', out.decode('latin1'), re.MULTILINE) except TimeoutException: logging.warning('"adb devices" command timed out') return [] @@ -141,7 +137,7 @@ def SigtermHandler(_signum, _unused_frame): except: # pylint: disable=bare-except logging.exception('Unexpected exception in main.') finally: - for process, _ in devices.itervalues(): + for process, _ in devices.values(): if process: try: process.terminate() @@ -151,8 +147,11 @@ def SigtermHandler(_signum, _unused_frame): if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) if 2 <= len(sys.argv) <= 3: print('adb_logcat_monitor: Initializing') - sys.exit(main(*sys.argv[1:3])) + if len(sys.argv) == 2: + sys.exit(main(sys.argv[1])) + sys.exit(main(sys.argv[1], sys.argv[2])) print('Usage: %s []' % sys.argv[0]) diff --git a/build/android/adb_logcat_printer.py b/build/android/adb_logcat_printer.py index a715170759d8..7f3c52aa74ea 100755 --- a/build/android/adb_logcat_printer.py +++ b/build/android/adb_logcat_printer.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -19,9 +19,9 @@ """ # pylint: disable=W0702 -import cStringIO +import argparse +import io import logging -import optparse import os import re import signal @@ -108,7 +108,7 @@ def GetDeviceLogs(log_filenames, logger): """ device_logs = [] - for device, device_files in log_filenames.iteritems(): + for device, device_files in log_filenames.items(): logger.debug('%s: %s', device, str(device_files)) device_file_lines = [] for cur_file in device_files: @@ -152,15 +152,15 @@ def ShutdownLogcatMonitor(base_dir, logger): def main(argv): - parser = optparse.OptionParser(usage='Usage: %prog [options] ') - parser.add_option('--output-path', - help='Output file path (if unspecified, prints to stdout)') - options, args = parser.parse_args(argv) - if len(args) != 1: - parser.error('Wrong number of unparsed args') - base_dir = args[0] - - log_stringio = cStringIO.StringIO() + parser = argparse.ArgumentParser() + parser.add_argument( + '--output-path', + help='Output file path (if unspecified, prints to stdout)') + parser.add_argument('log_dir') + args = parser.parse_args(argv) + base_dir = args.log_dir + + log_stringio = io.StringIO() logger = logging.getLogger('LogcatPrinter') logger.setLevel(LOG_LEVEL) sh = logging.StreamHandler(log_stringio) @@ -168,16 +168,16 @@ def main(argv): ' %(message)s')) logger.addHandler(sh) - if options.output_path: - if not os.path.exists(os.path.dirname(options.output_path)): + if args.output_path: + if not os.path.exists(os.path.dirname(args.output_path)): logger.warning('Output dir %s doesn\'t exist. Creating it.', - os.path.dirname(options.output_path)) - os.makedirs(os.path.dirname(options.output_path)) - output_file = open(options.output_path, 'w') - logger.info('Dumping logcat to local file %s. If running in a build, ' - 'this file will likely will be uploaded to google storage ' - 'in a later step. It can be downloaded from there.', - options.output_path) + os.path.dirname(args.output_path)) + os.makedirs(os.path.dirname(args.output_path)) + output_file = open(args.output_path, 'w') + logger.info( + 'Dumping logcat to local file %s. If running in a build, ' + 'this file will likely will be uploaded to google storage ' + 'in a later step. It can be downloaded from there.', args.output_path) else: output_file = sys.stdout diff --git a/build/android/adb_profile_chrome b/build/android/adb_profile_chrome index d3244ffdf605..27ecb6d7cf36 100755 --- a/build/android/adb_profile_chrome +++ b/build/android/adb_profile_chrome @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # diff --git a/build/android/adb_profile_chrome_startup b/build/android/adb_profile_chrome_startup index d5836cdf702a..bb639b9d39eb 100755 --- a/build/android/adb_profile_chrome_startup +++ b/build/android/adb_profile_chrome_startup @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # diff --git a/build/android/adb_reverse_forwarder.py b/build/android/adb_reverse_forwarder.py index 90d3139ae10b..c78f44d2ec2b 100755 --- a/build/android/adb_reverse_forwarder.py +++ b/build/android/adb_reverse_forwarder.py @@ -1,6 +1,6 @@ -#!/usr/bin/env vpython +#!/usr/bin/env vpython3 # -# Copyright (c) 2013 The Chromium Authors. All rights reserved. 
+# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -62,7 +62,7 @@ def main(argv): if len(args.ports) < 2 or len(args.ports) % 2: parser.error('Need even number of port pairs') - port_pairs = zip(args.ports[::2], args.ports[1::2]) + port_pairs = list(zip(args.ports[::2], args.ports[1::2])) if args.build_type: constants.SetBuildType(args.build_type) diff --git a/build/android/adb_system_webengine_command_line b/build/android/adb_system_webengine_command_line new file mode 100755 index 000000000000..2dce6d25fd18 --- /dev/null +++ b/build/android/adb_system_webengine_command_line @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current content shell flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the content shell +# flags. For example: +# adb_system_webengine_command_line --enable-webgl +# +# To remove all content shell flags, pass an empty string for the flags: +# adb_system_webengine_command_line "" + +exec $(dirname $0)/adb_command_line.py --name weblayer-command-line "$@" diff --git a/build/android/adb_system_webview_command_line b/build/android/adb_system_webview_command_line index a0d2705821d3..6b9fb4ee70c6 100755 --- a/build/android/adb_system_webview_command_line +++ b/build/android/adb_system_webview_command_line @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/android_only_explicit_jni_exports.lst b/build/android/android_only_explicit_jni_exports.lst index f989691865b2..eb7b1f2bed68 100644 --- a/build/android/android_only_explicit_jni_exports.lst +++ b/build/android/android_only_explicit_jni_exports.lst @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/android_only_jni_exports.lst b/build/android/android_only_jni_exports.lst index 1336fee14561..c44cb9b9232f 100644 --- a/build/android/android_only_jni_exports.lst +++ b/build/android/android_only_jni_exports.lst @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/apk_operations.py b/build/android/apk_operations.py index d6cd5836f1f1..2838240e7d46 100755 --- a/build/android/apk_operations.py +++ b/build/android/apk_operations.py @@ -1,12 +1,11 @@ -#!/usr/bin/env vpython -# Copyright 2017 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
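For reference, the adb_system_webengine_command_line wrapper added above delegates to adb_command_line.py with the weblayer-command-line flags file name. A hedged usage sketch (the on-device file follows the usual /data/local/tmp flags-file convention, not verified here):

    # Replace the current flags:
    build/android/adb_system_webengine_command_line --enable-webgl
    # Print the current flags:
    build/android/adb_system_webengine_command_line
    # Clear all flags:
    build/android/adb_system_webengine_command_line ""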
# Using colorama.Fore/Back/Style members # pylint: disable=no-member -from __future__ import print_function import argparse import collections @@ -125,42 +124,15 @@ def _GenerateBundleApks(info, optimize_for=optimize_for) -def _InstallBundle(devices, apk_helper_instance, package_name, - command_line_flags_file, modules, fake_modules): - # Path Chrome creates after validating fake modules. This needs to be cleared - # for pushed fake modules to be picked up. - SPLITCOMPAT_PATH = '/data/data/' + package_name + '/files/splitcompat' - # Chrome command line flag needed for fake modules to work. - FAKE_FEATURE_MODULE_INSTALL = '--fake-feature-module-install' - - def ShouldWarnFakeFeatureModuleInstallFlag(device): - if command_line_flags_file: - changer = flag_changer.FlagChanger(device, command_line_flags_file) - return FAKE_FEATURE_MODULE_INSTALL not in changer.GetCurrentFlags() - return False - - def ClearFakeModules(device): - if device.PathExists(SPLITCOMPAT_PATH, as_root=True): - device.RemovePath( - SPLITCOMPAT_PATH, force=True, recursive=True, as_root=True) - logging.info('Removed %s', SPLITCOMPAT_PATH) - else: - logging.info('Skipped removing nonexistent %s', SPLITCOMPAT_PATH) +def _InstallBundle(devices, apk_helper_instance, modules, fake_modules): def Install(device): - ClearFakeModules(device) - if fake_modules and ShouldWarnFakeFeatureModuleInstallFlag(device): - # Print warning if command line is not set up for fake modules. - msg = ('Command line has no %s: Fake modules will be ignored.' % - FAKE_FEATURE_MODULE_INSTALL) - print(_Colorize(msg, colorama.Fore.YELLOW + colorama.Style.BRIGHT)) - - device.Install( - apk_helper_instance, - permissions=[], - modules=modules, - fake_modules=fake_modules, - allow_downgrade=True) + device.Install(apk_helper_instance, + permissions=[], + modules=modules, + fake_modules=fake_modules, + allow_downgrade=True, + reinstall=True) # Basic checks for |modules| and |fake_modules|. # * |fake_modules| cannot include 'base'. @@ -215,24 +187,95 @@ def _NormalizeProcessName(debug_process_name, package_name): return debug_process_name -def _LaunchUrl(devices, package_name, argv=None, command_line_flags_file=None, - url=None, apk=None, wait_for_java_debugger=False, - debug_process_name=None, nokill=None): +def _ResolveActivity(device, package_name, category, action): + # E.g.: + # Activity Resolver Table: + # Schemes: + # http: + # 67e97c0 org.chromium.pkg/.MainActivityfilter c91d43e + # Action: "android.intent.action.VIEW" + # Category: "android.intent.category.DEFAULT" + # Category: "android.intent.category.BROWSABLE" + # Scheme: "http" + # Scheme: "https" + # + # Non-Data Actions: + # android.intent.action.MAIN: + # 67e97c0 org.chromium.pkg/.MainActivity filter 4a34cf9 + # Action: "android.intent.action.MAIN" + # Category: "android.intent.category.LAUNCHER" + lines = device.RunShellCommand(['dumpsys', 'package', package_name], + check_return=True) + + # Extract the Activity Resolver Table: section. + start_idx = next((i for i, l in enumerate(lines) + if l.startswith('Activity Resolver Table:')), None) + if start_idx is None: + if not device.IsApplicationInstalled(package_name): + raise Exception('Package not installed: ' + package_name) + raise Exception('No Activity Resolver Table in:\n' + '\n'.join(lines)) + line_count = next(i for i, l in enumerate(lines[start_idx + 1:]) + if l and not l[0].isspace()) + data = '\n'.join(lines[start_idx:start_idx + line_count]) + + # Split on each Activity entry. 
+ entries = re.split(r'^ [0-9a-f]+ ', data, flags=re.MULTILINE) + + def activity_name_from_entry(entry): + assert entry.startswith(package_name), 'Got: ' + entry + activity_name = entry[len(package_name) + 1:].split(' ', 1)[0] + if activity_name[0] == '.': + activity_name = package_name + activity_name + return activity_name + + # Find the one with the text we want. + category_text = f'Category: "{category}"' + action_text = f'Action: "{action}"' + matched_entries = [ + e for e in entries[1:] if category_text in e and action_text in e + ] + + if not matched_entries: + raise Exception(f'Did not find {category_text}, {action_text} in\n{data}') + if len(matched_entries) > 1: + # When there are multiple matches, look for the one marked as default. + # Necessary for Monochrome, which also has MonochromeLauncherActivity. + default_entries = [ + e for e in matched_entries if 'android.intent.category.DEFAULT' in e + ] + matched_entries = default_entries or matched_entries + + # See if all matches point to the same activity. + activity_names = {activity_name_from_entry(e) for e in matched_entries} + + if len(activity_names) > 1: + raise Exception('Found multiple launcher activities:\n * ' + + '\n * '.join(sorted(activity_names))) + return next(iter(activity_names)) + + +def _LaunchUrl(devices, + package_name, + argv=None, + command_line_flags_file=None, + url=None, + wait_for_java_debugger=False, + debug_process_name=None, + nokill=None): if argv and command_line_flags_file is None: raise Exception('This apk does not support any flags.') - if url: - # TODO(agrieve): Launch could be changed to require only package name by - # parsing "dumpsys package" rather than relying on the apk. - if not apk: - raise Exception('Launching with URL is not supported when using ' - '--package-name. Use --apk-path instead.') - view_activity = apk.GetViewActivityName() - if not view_activity: - raise Exception('APK does not support launching with URLs.') debug_process_name = _NormalizeProcessName(debug_process_name, package_name) + if url is None: + category = 'android.intent.category.LAUNCHER' + action = 'android.intent.action.MAIN' + else: + category = 'android.intent.category.BROWSABLE' + action = 'android.intent.action.VIEW' + def launch(device): + activity = _ResolveActivity(device, package_name, category, action) # --persistent is required to have Settings.Global.DEBUG_APP be set, which # we currently use to allow reading of flags. https://crbug.com/784947 if not nokill: @@ -255,18 +298,13 @@ def launch(device): except device_errors.AdbShellCommandFailedError: logging.exception('Failed to set flags') - if url is None: - # Simulate app icon click if no url is present. 
- cmd = [ - 'am', 'start', '-p', package_name, '-c', - 'android.intent.category.LAUNCHER', '-a', 'android.intent.action.MAIN' - ] - device.RunShellCommand(cmd, check_return=True) - else: - launch_intent = intent.Intent(action='android.intent.action.VIEW', - activity=view_activity, data=url, - package=package_name) - device.StartActivity(launch_intent) + launch_intent = intent.Intent(action=action, + activity=activity, + data=url, + package=package_name) + logging.info('Sending launch intent for %s', activity) + device.StartActivity(launch_intent) + device_utils.DeviceUtils.parallel(devices).pMap(launch) if wait_for_java_debugger: print('Waiting for debugger to attach to process: ' + @@ -472,8 +510,8 @@ def disk_usage_helper(d): code_path = re.search(r'codePath=(.*)', package_output).group(1) lib_path = re.search(r'(?:legacyN|n)ativeLibrary(?:Dir|Path)=(.*)', package_output).group(1) - except AttributeError: - raise Exception('Error parsing dumpsys output: ' + package_output) + except AttributeError as e: + raise Exception('Error parsing dumpsys output: ' + package_output) from e if code_path.startswith('/system'): logging.warning('Measurement of system image apks can be innacurate') @@ -535,8 +573,8 @@ def disk_usage_helper(d): compilation_filter) def print_sizes(desc, sizes): - print('%s: %d KiB' % (desc, sum(sizes.itervalues()))) - for path, size in sorted(sizes.iteritems()): + print('%s: %d KiB' % (desc, sum(sizes.values()))) + for path, size in sorted(sizes.items()): print(' %s: %s KiB' % (path, size)) parallel_devices = device_utils.DeviceUtils.parallel(devices) @@ -548,7 +586,7 @@ def print_sizes(desc, sizes): (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes, compilation_filter) = result - total = sum(sum(sizes.itervalues()) for sizes in result[:-1]) + total = sum(sum(sizes.values()) for sizes in result[:-1]) print_sizes('Apk', apk_sizes) print_sizes('App Data (non-code cache)', data_dir_sizes) @@ -563,12 +601,12 @@ def print_sizes(desc, sizes): print('Total: %s KiB (%.1f MiB)' % (total, total / 1024.0)) -class _LogcatProcessor(object): +class _LogcatProcessor: ParsedLine = collections.namedtuple( 'ParsedLine', ['date', 'invokation_time', 'pid', 'tid', 'priority', 'tag', 'message']) - class NativeStackSymbolizer(object): + class NativeStackSymbolizer: """Buffers lines from native stacks and symbolizes them when done.""" # E.g.: #06 pc 0x0000d519 /apex/com.android.runtime/lib/libart.so # E.g.: #01 pc 00180c8d /data/data/.../lib/libbase.cr.so @@ -582,9 +620,12 @@ def __init__(self, stack_script_context, print_func): def _FlushLines(self): """Prints queued lines after sending them through stack.py.""" + if self._crash_lines_buffer is None: + return + crash_lines = self._crash_lines_buffer self._crash_lines_buffer = None - with tempfile.NamedTemporaryFile() as f: + with tempfile.NamedTemporaryFile(mode='w') as f: f.writelines(x[0].message + '\n' for x in crash_lines) f.flush() proc = self._stack_script_context.Popen( @@ -612,8 +653,7 @@ def AddLine(self, parsed_line, dim): self._crash_lines_buffer.append((parsed_line, dim)) return - if self._crash_lines_buffer is not None: - self._FlushLines() + self._FlushLines() self._print_func(parsed_line, dim) @@ -624,6 +664,7 @@ def AddLine(self, parsed_line, dim): 'ActivityManager', # Shows activity lifecycle messages. 'ActivityTaskManager', # More activity lifecycle messages. 'AndroidRuntime', # Java crash dumps + 'AppZygoteInit', # Android's native application zygote support. 'DEBUG', # Native crash dump. 
} @@ -647,11 +688,19 @@ def __init__(self, package_name, stack_script_context, deobfuscate=None, - verbose=False): + verbose=False, + exit_on_match=None, + extra_package_names=None): self._device = device self._package_name = package_name + self._extra_package_names = extra_package_names or [] self._verbose = verbose self._deobfuscator = deobfuscate + if exit_on_match is not None: + self._exit_on_match = re.compile(exit_on_match) + else: + self._exit_on_match = None + self._found_exit_match = False self._native_stack_symbolizer = _LogcatProcessor.NativeStackSymbolizer( stack_script_context, self._PrintParsedLine) # Process ID for the app's main process (with no :name suffix). @@ -665,7 +714,7 @@ def __init__(self, # START u0 {act=android.intent.action.MAIN \ # cat=[android.intent.category.LAUNCHER] \ # flg=0x10000000 pkg=com.google.chromeremotedesktop} from uid 2000 - self._start_pattern = re.compile(r'START .*pkg=' + package_name) + self._start_pattern = re.compile(r'START .*(?:cmp|pkg)=' + package_name) self.nonce = 'Chromium apk_operations.py nonce={}'.format(random.random()) # Holds lines buffered on start-up, before we find our nonce message. @@ -674,7 +723,7 @@ def __init__(self, # Give preference to PID reported by "ps" over those found from # _start_pattern. There can be multiple "Start proc" messages from prior # runs of the app. - self._found_initial_pid = self._primary_pid != None + self._found_initial_pid = self._primary_pid is not None # Retrieve any additional patterns that are relevant for the User. self._user_defined_highlight = None user_regex = os.environ.get('CHROMIUM_LOGCAT_HIGHLIGHT') @@ -690,20 +739,21 @@ def _UpdateMyPids(self): # ProcessLine method below also includes lines from processes which may # have already exited. self._primary_pid = None - for process in _GetPackageProcesses(self._device, self._package_name): - # We take only the first "main" process found in order to account for - # possibly forked() processes. - if ':' not in process.name and self._primary_pid is None: - self._primary_pid = process.pid - self._my_pids.add(process.pid) + for package_name in [self._package_name] + self._extra_package_names: + for process in _GetPackageProcesses(self._device, package_name): + # We take only the first "main" process found in order to account for + # possibly forked() processes. 
+ if ':' not in process.name and self._primary_pid is None: + self._primary_pid = process.pid + self._my_pids.add(process.pid) def _GetPidStyle(self, pid, dim=False): if pid == self._primary_pid: return colorama.Fore.WHITE - elif pid in self._my_pids: + if pid in self._my_pids: # TODO(wnwen): Use one separate persistent color per process, pop LRU return colorama.Fore.YELLOW - elif dim: + if dim: return colorama.Style.DIM return '' @@ -712,7 +762,7 @@ def _GetPriorityStyle(self, priority, dim=False): if dim: return '' style = colorama.Fore.BLACK - if priority == 'E' or priority == 'F': + if priority in ('E', 'F'): style += colorama.Back.RED elif priority == 'W': style += colorama.Back.YELLOW @@ -758,6 +808,9 @@ def consume_integer_token_or_default(default): date, invokation_time, pid, tid, priority, tag, original_message) def _PrintParsedLine(self, parsed_line, dim=False): + if self._exit_on_match and self._exit_on_match.search(parsed_line.message): + self._found_exit_match = True + tid_style = colorama.Style.NORMAL user_match = self._user_defined_highlight and ( re.search(self._user_defined_highlight, parsed_line.tag) @@ -794,6 +847,9 @@ def _TriggerNonceFound(self): self._initial_buffered_lines = None self.nonce = None + def FoundExitMatch(self): + return self._found_exit_match + def ProcessLine(self, line): if not line or line.startswith('------'): return @@ -842,14 +898,26 @@ def ProcessLine(self, line): self._initial_buffered_lines.append((log, not owned_pid)) -def _RunLogcat(device, package_name, stack_script_context, deobfuscate, - verbose): - logcat_processor = _LogcatProcessor( - device, package_name, stack_script_context, deobfuscate, verbose) +def _RunLogcat(device, + package_name, + stack_script_context, + deobfuscate, + verbose, + exit_on_match=None, + extra_package_names=None): + logcat_processor = _LogcatProcessor(device, + package_name, + stack_script_context, + deobfuscate, + verbose, + exit_on_match=exit_on_match, + extra_package_names=extra_package_names) device.RunShellCommand(['log', logcat_processor.nonce]) for line in device.adb.Logcat(logcat_format='threadtime'): try: logcat_processor.ProcessLine(line) + if logcat_processor.FoundExitMatch(): + return except: sys.stderr.write('Failed to process line: ' + line + '\n') # Skip stack trace for the common case of the adb server being @@ -860,9 +928,11 @@ def _RunLogcat(device, package_name, stack_script_context, deobfuscate, def _GetPackageProcesses(device, package_name): + my_names = (package_name, package_name + '_zygote') return [ p for p in device.ListProcesses(package_name) - if p.name == package_name or p.name.startswith(package_name + ':')] + if p.name in my_names or p.name.startswith(package_name + ':') + ] def _RunPs(devices, package_name): @@ -912,7 +982,7 @@ def _RunCompileDex(devices, package_name, compilation_filter): def _RunProfile(device, package_name, host_build_directory, pprof_out_path, - process_specifier, thread_specifier, extra_args): + process_specifier, thread_specifier, events, extra_args): simpleperf.PrepareDevice(device) device_simpleperf_path = simpleperf.InstallSimpleperf(device, package_name) with tempfile.NamedTemporaryFile() as fh: @@ -920,11 +990,10 @@ def _RunProfile(device, package_name, host_build_directory, pprof_out_path, with simpleperf.RunSimpleperf(device, device_simpleperf_path, package_name, process_specifier, thread_specifier, - extra_args, host_simpleperf_out_path): - sys.stdout.write('Profiler is running; press Enter to stop...') + events, extra_args, host_simpleperf_out_path): 
+ sys.stdout.write('Profiler is running; press Enter to stop...\n') sys.stdin.read(1) - sys.stdout.write('Post-processing data...') - sys.stdout.flush() + sys.stdout.write('Post-processing data...\n') simpleperf.ConvertSimpleperfToPprof(host_simpleperf_out_path, host_build_directory, pprof_out_path) @@ -941,7 +1010,7 @@ def _RunProfile(device, package_name, host_build_directory, pprof_out_path, """ % {'s': pprof_out_path})) -class _StackScriptContext(object): +class _StackScriptContext: """Maintains temporary files needed by stack.py.""" def __init__(self, @@ -1000,7 +1069,7 @@ def Popen(self, input_file=None, **kwargs): if input_file: cmd.append(input_file) logging.info('Running stack.py') - return subprocess.Popen(cmd, **kwargs) + return subprocess.Popen(cmd, universal_newlines=True, **kwargs) def _GenerateAvailableDevicesMessage(devices): @@ -1063,7 +1132,7 @@ def _SaveDeviceCaches(devices, output_directory): logging.info('Wrote device cache: %s', cache_path) -class _Command(object): +class _Command: name = None description = None long_description = None @@ -1078,7 +1147,7 @@ class _Command(object): calls_exec = False supports_multiple_devices = True - def __init__(self, from_wrapper_script, is_bundle): + def __init__(self, from_wrapper_script, is_bundle, is_test_apk): self._parser = None self._from_wrapper_script = from_wrapper_script self.args = None @@ -1087,6 +1156,7 @@ def __init__(self, from_wrapper_script, is_bundle): self.install_dict = None self.devices = None self.is_bundle = is_bundle + self.is_test_apk = is_test_apk self.bundle_generation_info = None # Only support incremental install from APK wrapper scripts. if is_bundle or not from_wrapper_script: @@ -1095,7 +1165,7 @@ def __init__(self, from_wrapper_script, is_bundle): def RegisterBundleGenerationInfo(self, bundle_generation_info): self.bundle_generation_info = bundle_generation_info - def _RegisterExtraArgs(self, subp): + def _RegisterExtraArgs(self, group): pass def RegisterArgs(self, parser): @@ -1341,8 +1411,7 @@ def Run(self): modules = list( set(self.args.module) - set(self.args.no_module) - set(self.args.fake)) - _InstallBundle(self.devices, self.apk_helper, self.args.package_name, - self.args.command_line_flags_file, modules, self.args.fake) + _InstallBundle(self.devices, self.apk_helper, modules, self.args.fake) else: _InstallApk(self.devices, self.apk_helper, self.install_dict) @@ -1393,13 +1462,13 @@ def _RegisterExtraArgs(self, group): group.add_argument('url', nargs='?', help='A URL to launch with.') def Run(self): - if self.args.url and self.is_bundle: - # TODO(digit): Support this, maybe by using 'dumpsys' as described - # in the _LaunchUrl() comment. 
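The TODO removed above is what the new _ResolveActivity implements: launch targets are now resolved by parsing 'dumpsys package' output instead of reading the APK. A self-contained sketch of that parsing, with sample output abridged from the comment in _ResolveActivity:

    import re

    DUMPSYS_SNIPPET = '''\
    Activity Resolver Table:
      Non-Data Actions:
          android.intent.action.MAIN:
            67e97c0 org.chromium.pkg/.MainActivity filter 4a34cf9
              Action: "android.intent.action.MAIN"
              Category: "android.intent.category.LAUNCHER"
    '''

    # Split on the leading hex object id of each activity entry, then keep
    # the entries mentioning the desired action/category pair.
    entries = re.split(r'^ +[0-9a-f]+ ', DUMPSYS_SNIPPET, flags=re.MULTILINE)
    matched = [e for e in entries[1:]
               if 'Action: "android.intent.action.MAIN"' in e
               and 'Category: "android.intent.category.LAUNCHER"' in e]
    print(matched[0].split(' ', 1)[0])  # org.chromium.pkg/.MainActivity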
- raise Exception('Launching with URL not supported for bundles yet!') - _LaunchUrl(self.devices, self.args.package_name, argv=self.args.args, + if self.is_test_apk: + raise Exception('Use the bin/run_* scripts to run test apks.') + _LaunchUrl(self.devices, + self.args.package_name, + argv=self.args.args, command_line_flags_file=self.args.command_line_flags_file, - url=self.args.url, apk=self.apk_helper, + url=self.args.url, wait_for_java_debugger=self.args.wait_for_java_debugger, debug_process_name=self.args.debug_process_name, nokill=self.args.nokill) @@ -1511,9 +1580,20 @@ def Run(self): self.args.apk_path, self.bundle_generation_info, quiet=True) + + extra_package_names = [] + if self.is_test_apk and self.additional_apk_helpers: + for additional_apk_helper in self.additional_apk_helpers: + extra_package_names.append(additional_apk_helper.GetPackageName()) + try: - _RunLogcat(self.devices[0], self.args.package_name, stack_script_context, - deobfuscate, bool(self.args.verbose_count)) + _RunLogcat(self.devices[0], + self.args.package_name, + stack_script_context, + deobfuscate, + bool(self.args.verbose_count), + self.args.exit_on_match, + extra_package_names=extra_package_names) except KeyboardInterrupt: pass # Don't show stack trace upon Ctrl-C finally: @@ -1529,6 +1609,8 @@ def _RegisterExtraArgs(self, group): group.set_defaults(no_deobfuscate=False) group.add_argument('--proguard-mapping-path', help='Path to ProGuard map (enables deobfuscation)') + group.add_argument('--exit-on-match', + help='Exits logcat when a message matches this regex.') class _PsCommand(_Command): @@ -1628,6 +1710,8 @@ def _RegisterExtraArgs(self, group): def Run(self): keytool = os.path.join(_JAVA_HOME, 'bin', 'keytool') + pem_certificate_pattern = re.compile( + r'-+BEGIN CERTIFICATE-+([\r\n0-9A-Za-z+/=]+)-+END CERTIFICATE-+[\r\n]*') if self.is_bundle: # Bundles are not signed until converted to .apks. The wrapper scripts # record which key will be used to sign though. @@ -1647,36 +1731,72 @@ def Run(self): if self.args.full_cert: # Redirect stderr to hide a keytool warning about using non-standard # keystore format. - full_output = subprocess.check_output( - cmd + ['-rfc'], stderr=subprocess.STDOUT) + pem_encoded_certificate = subprocess.check_output( + cmd + ['-rfc'], stderr=subprocess.STDOUT).decode() else: - cmd = [ - build_tools.GetPath('apksigner'), 'verify', '--print-certs', - '--verbose', self.apk_helper.path + + def run_apksigner(min_sdk_version): + cmd = [ + build_tools.GetPath('apksigner'), 'verify', '--min-sdk-version', + str(min_sdk_version), '--print-certs-pem', '--verbose', + self.apk_helper.path + ] + logging.warning('Running: %s', ' '.join(cmd)) + env = os.environ.copy() + env['PATH'] = os.path.pathsep.join( + [os.path.join(_JAVA_HOME, 'bin'), + env.get('PATH')]) + # Redirect stderr to hide verification failures (see explanation below). + return subprocess.check_output(cmd, + env=env, + universal_newlines=True, + stderr=subprocess.STDOUT) + + # apksigner's default behavior is nonintuitive: it will print "Verified + # using ...: false" for any scheme which is obsolete for + # the APK's minSdkVersion even if it actually was signed with that scheme + # (ex. it prints "Verified using v1 scheme: false" for Monochrome because + # v1 was obsolete by N). To workaround this, we force apksigner to use the + # lowest possible minSdkVersion. 
We need to fallback to higher + # minSdkVersions in case the APK fails to verify for that minSdkVersion + # (which means the APK is genuinely not signed with that scheme). These + # SDK values are the highest SDK version before the next scheme is + # available: + versions = [ + version_codes.MARSHMALLOW, # before v2 launched in N + version_codes.OREO_MR1, # before v3 launched in P + version_codes.Q, # before v4 launched in R + version_codes.R, ] - logging.warning('Running: %s', ' '.join(cmd)) - env = os.environ.copy() - env['PATH'] = os.path.pathsep.join( - [os.path.join(_JAVA_HOME, 'bin'), - env.get('PATH')]) - stdout = subprocess.check_output(cmd, env=env) - print(stdout) + stdout = None + for min_sdk_version in versions: + try: + stdout = run_apksigner(min_sdk_version) + break + except subprocess.CalledProcessError: + # Doesn't verify with this min-sdk-version, so try again with a higher + # one + continue + if not stdout: + raise RuntimeError('apksigner was not able to verify APK') + + # Separate what the '--print-certs' flag would output vs. the additional + # signature output included by '--print-certs-pem'. The additional PEM + # output is only printed when self.args.full_cert is specified. + verification_hash_info = pem_certificate_pattern.sub('', stdout) + print(verification_hash_info) if self.args.full_cert: - if 'v1 scheme (JAR signing): true' not in stdout: - raise Exception( - 'Cannot print full certificate because apk is not V1 signed.') + m = pem_certificate_pattern.search(stdout) + if not m: + raise Exception('apksigner did not print a certificate') + pem_encoded_certificate = m.group(0) - cmd = [keytool, '-printcert', '-jarfile', self.apk_helper.path, '-rfc'] - # Redirect stderr to hide a keytool warning about using non-standard - # keystore format. - full_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) if self.args.full_cert: - m = re.search( - r'-+BEGIN CERTIFICATE-+([\r\n0-9A-Za-z+/=]+)-+END CERTIFICATE-+', - full_output, re.MULTILINE) + m = pem_certificate_pattern.search(pem_encoded_certificate) if not m: - raise Exception('Unable to parse certificate:\n{}'.format(full_output)) + raise Exception( + 'Unable to parse certificate:\n{}'.format(pem_encoded_certificate)) signature = re.sub(r'[\r\n]+', '', m.group(1)) print() print('Full Signature:') @@ -1712,13 +1832,18 @@ def _RegisterExtraArgs(self, group): 'use --profile-thread=main).')) group.add_argument('--profile-output', default='profile.pb', help='Output file for profiling data') + group.add_argument('--profile-events', default='cpu-cycles', + help=('A comma separated list of perf events to capture ' + '(e.g. \'cpu-cycles,branch-misses\'). 
Run ' + '`simpleperf list` on your device to see available ' + 'events.')) def Run(self): extra_args = shlex.split(self.args.args or '') _RunProfile(self.devices[0], self.args.package_name, self.args.output_directory, self.args.profile_output, self.args.profile_process, self.args.profile_thread, - extra_args) + self.args.profile_events, extra_args) class _RunCommand(_InstallCommand, _LaunchCommand, _LogcatCommand): @@ -1735,6 +1860,8 @@ def _RegisterExtraArgs(self, group): help='Install and launch, but do not enter logcat.') def Run(self): + if self.is_test_apk: + raise Exception('Use the bin/run_* scripts to run test apks.') logging.warning('Installing...') _InstallCommand.Run(self) logging.warning('Sending launch intent...') @@ -1785,13 +1912,23 @@ def Run(self): class _ManifestCommand(_Command): name = 'dump-manifest' - description = 'Dump the android manifest from this bundle, as XML, to stdout.' + description = 'Dump the android manifest as XML, to stdout.' need_device_args = False + needs_apk_helper = True def Run(self): - bundletool.RunBundleTool([ - 'dump', 'manifest', '--bundle', self.bundle_generation_info.bundle_path - ]) + if self.is_bundle: + sys.stdout.write( + bundletool.RunBundleTool([ + 'dump', 'manifest', '--bundle', + self.bundle_generation_info.bundle_path + ])) + else: + apkanalyzer = os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'android_sdk', + 'public', 'cmdline-tools', 'latest', 'bin', + 'apkanalyzer') + subprocess.check_call( + [apkanalyzer, 'manifest', 'print', self.apk_helper.path]) class _StackCommand(_Command): @@ -1839,19 +1976,22 @@ def Run(self): _ProfileCommand, _RunCommand, _StackCommand, + _ManifestCommand, ] # Commands specific to app bundles. _BUNDLE_COMMANDS = [ _BuildBundleApks, - _ManifestCommand, ] -def _ParseArgs(parser, from_wrapper_script, is_bundle): +def _ParseArgs(parser, from_wrapper_script, is_bundle, is_test_apk): subparsers = parser.add_subparsers() command_list = _COMMANDS + (_BUNDLE_COMMANDS if is_bundle else []) - commands = [clazz(from_wrapper_script, is_bundle) for clazz in command_list] + commands = [ + clazz(from_wrapper_script, is_bundle, is_test_apk) + for clazz in command_list + ] for command in commands: if from_wrapper_script or not command.needs_output_directory: @@ -1868,13 +2008,17 @@ def _ParseArgs(parser, from_wrapper_script, is_bundle): def _RunInternal(parser, output_directory=None, additional_apk_paths=None, - bundle_generation_info=None): + bundle_generation_info=None, + is_test_apk=False): colorama.init() parser.set_defaults( additional_apk_paths=additional_apk_paths, output_directory=output_directory) from_wrapper_script = bool(output_directory) - args = _ParseArgs(parser, from_wrapper_script, bool(bundle_generation_info)) + args = _ParseArgs(parser, + from_wrapper_script, + is_bundle=bool(bundle_generation_info), + is_test_apk=is_test_apk) run_tests_helper.SetLogLevel(args.verbose_count) if bundle_generation_info: args.command.RegisterBundleGenerationInfo(bundle_generation_info) @@ -1961,6 +2105,39 @@ def RunForBundle(output_directory, bundle_path, bundle_apks_path, bundle_generation_info=bundle_generation_info) +def RunForTestApk(*, output_directory, package_name, test_apk_path, + test_apk_json, proguard_mapping_path, additional_apk_paths): + """Entry point for generated test apk wrapper scripts. + + This is intended to make commands like logcat (with proguard deobfuscation) + available. The run_* scripts should be used to actually run tests. + + Args: + output_dir: Chromium output directory path. 
+ package_name: The package name for the test apk. + test_apk_path: The test apk to install. + test_apk_json: The incremental json dict for the test apk. + proguard_mapping_path: Input path to the Proguard mapping file, used to + deobfuscate Java stack traces. + additional_apk_paths: Additional APKs to install. + """ + constants.SetOutputDirectory(output_directory) + devil_chromium.Initialize(output_directory=output_directory) + + parser = argparse.ArgumentParser() + exists_or_none = lambda p: p if p and os.path.exists(p) else None + + parser.set_defaults(apk_path=exists_or_none(test_apk_path), + incremental_json=exists_or_none(test_apk_json), + package_name=package_name, + proguard_mapping_path=proguard_mapping_path) + + _RunInternal(parser, + output_directory=output_directory, + additional_apk_paths=additional_apk_paths, + is_test_apk=True) + + def main(): devil_chromium.Initialize() _RunInternal(argparse.ArgumentParser()) diff --git a/build/android/apk_operations.pydeps b/build/android/apk_operations.pydeps index 60b128942e72..d20bcf24581e 100644 --- a/build/android/apk_operations.pydeps +++ b/build/android/apk_operations.pydeps @@ -64,7 +64,8 @@ ../../third_party/catapult/devil/devil/utils/zip_utils.py ../../third_party/catapult/third_party/six/six.py ../../third_party/jinja2/__init__.py -../../third_party/jinja2/_compat.py +../../third_party/jinja2/_identifier.py +../../third_party/jinja2/async_utils.py ../../third_party/jinja2/bccache.py ../../third_party/jinja2/compiler.py ../../third_party/jinja2/defaults.py @@ -84,11 +85,12 @@ ../../third_party/markupsafe/__init__.py ../../third_party/markupsafe/_compat.py ../../third_party/markupsafe/_native.py +../action_helpers.py ../gn_helpers.py ../print_python_deps.py +../zip_helpers.py adb_command_line.py apk_operations.py -convert_dex_profile.py devil_chromium.py gyp/bundletool.py gyp/dex.py @@ -96,7 +98,6 @@ gyp/util/__init__.py gyp/util/build_utils.py gyp/util/md5_check.py gyp/util/resource_utils.py -gyp/util/zipalign.py incremental_install/__init__.py incremental_install/installer.py pylib/__init__.py @@ -104,6 +105,7 @@ pylib/constants/__init__.py pylib/constants/host_paths.py pylib/symbols/__init__.py pylib/symbols/deobfuscator.py +pylib/symbols/expensive_line_transformer.py pylib/utils/__init__.py pylib/utils/app_bundle_utils.py pylib/utils/simpleperf.py diff --git a/build/android/apply_shared_preference_file.py b/build/android/apply_shared_preference_file.py index 187bf18284d4..a4aa4994cf79 100755 --- a/build/android/apply_shared_preference_file.py +++ b/build/android/apply_shared_preference_file.py @@ -1,6 +1,6 @@ -#!/usr/bin/env vpython +#!/usr/bin/env vpython3 # -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/asan_symbolize.py b/build/android/asan_symbolize.py index 65850898739b..3274b95042f7 100755 --- a/build/android/asan_symbolize.py +++ b/build/android/asan_symbolize.py @@ -1,13 +1,12 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import print_function +import argparse import collections -import optparse import os import re import sys @@ -52,7 +51,7 @@ def _ParseAsanLogLine(line): return AsanParsedLine(prefix=m.group('prefix'), library=m.group('lib'), pos=m.group('pos'), - rel_address='%08x' % int(m.group('addr'), 16)) + rel_address=int(m.group('addr'), 16)) def _FindASanLibraries(): @@ -98,16 +97,16 @@ def _PrintSymbolized(asan_input, arch): # Maps library -> { address -> [(symbol, location, obj_sym_with_offset)...] } all_symbols = collections.defaultdict(dict) - for library, items in libraries.iteritems(): + for library, items in libraries.items(): libname = _TranslateLibPath(library, asan_libs) - lib_relative_addrs = set([i.rel_address for i in items]) + lib_relative_addrs = set(i.rel_address for i in items) # pylint: disable=no-member - info_dict = symbol.SymbolInformationForSet(libname, - lib_relative_addrs, - True, - cpu_arch=arch) - if info_dict: - all_symbols[library] = info_dict + symbols_by_library = symbol.SymbolInformationForSet(libname, + lib_relative_addrs, + True, + cpu_arch=arch) + if symbols_by_library: + all_symbols[library] = symbols_by_library for log_line in asan_log_lines: m = log_line.parsed @@ -118,33 +117,36 @@ def _PrintSymbolized(asan_input, arch): # that usually one wants to display the last list item, not the first. # The code below takes the first, is this the best choice here? s = all_symbols[m.library][m.rel_address][0] - print('%s%s %s %s' % (m.prefix, m.pos, s[0], s[1])) + symbol_name = s[0] + symbol_location = s[1] + print('%s%s %s %s @ \'%s\'' % + (m.prefix, m.pos, hex(m.rel_address), symbol_name, symbol_location)) else: print(log_line.raw) def main(): - parser = optparse.OptionParser() - parser.add_option('-l', '--logcat', - help='File containing adb logcat output with ASan stacks. ' - 'Use stdin if not specified.') - parser.add_option('--output-directory', - help='Path to the root build directory.') - parser.add_option('--arch', default='arm', - help='CPU architecture name') - options, _ = parser.parse_args() - - if options.output_directory: - constants.SetOutputDirectory(options.output_directory) + parser = argparse.ArgumentParser() + parser.add_argument('-l', + '--logcat', + help='File containing adb logcat output with ASan ' + 'stacks. Use stdin if not specified.') + parser.add_argument('--output-directory', + help='Path to the root build directory.') + parser.add_argument('--arch', default='arm', help='CPU architecture name') + args = parser.parse_args() + + if args.output_directory: + constants.SetOutputDirectory(args.output_directory) # Do an up-front test that the output directory is known. constants.CheckOutputDirectory() - if options.logcat: - asan_input = file(options.logcat, 'r') + if args.logcat: + asan_input = open(args.logcat, 'r') else: asan_input = sys.stdin - _PrintSymbolized(asan_input.readlines(), options.arch) + _PrintSymbolized(asan_input.readlines(), args.arch) if __name__ == "__main__": diff --git a/build/android/bytecode/BUILD.gn b/build/android/bytecode/BUILD.gn index 36b54329f654..9478807d78e4 100644 --- a/build/android/bytecode/BUILD.gn +++ b/build/android/bytecode/BUILD.gn @@ -1,21 +1,25 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
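The asan_symbolize.py change above stores rel_address as an int rather than a pre-formatted '%08x' string, and formats it with hex() only at print time. A minimal sketch of the representational change (the address value is illustrative):

    addr = int('0000d519', 16)  # AsanParsedLine.rel_address is now an int
    print('%08x' % addr)        # old stored form: 0000d519
    print(hex(addr))            # new printed form: 0xd519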
import("//build/config/android/rules.gni") java_binary("bytecode_processor") { + main_class = "org.chromium.bytecode.ByteCodeProcessor" + wrapper_script_name = "helper/bytecode_processor" + deps = [ ":bytecode_processor_java" ] +} + +java_library("bytecode_processor_java") { sources = [ "java/org/chromium/bytecode/ByteCodeProcessor.java", "java/org/chromium/bytecode/ClassPathValidator.java", "java/org/chromium/bytecode/TypeUtils.java", ] - main_class = "org.chromium.bytecode.ByteCodeProcessor" deps = [ "//third_party/android_deps:org_ow2_asm_asm_java", "//third_party/android_deps:org_ow2_asm_asm_util_java", ] - wrapper_script_name = "helper/bytecode_processor" enable_bytecode_checks = false } @@ -54,3 +58,29 @@ java_library("fragment_activity_replacer_java") { "//third_party/android_deps:org_ow2_asm_asm_util_java", ] } + +java_binary("trace_event_adder") { + main_class = "org.chromium.bytecode.TraceEventAdder" + deps = [ ":trace_event_adder_java" ] + wrapper_script_name = "helper/trace_event_adder" +} + +java_library("trace_event_adder_java") { + visibility = [ ":*" ] + sources = [ + "java/org/chromium/bytecode/ByteCodeRewriter.java", + "java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java", + "java/org/chromium/bytecode/MethodCheckerClassAdapter.java", + "java/org/chromium/bytecode/MethodDescription.java", + "java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java", + "java/org/chromium/bytecode/TraceEventAdder.java", + "java/org/chromium/bytecode/TraceEventAdderClassAdapter.java", + "java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java", + ] + deps = [ + ":bytecode_processor_java", + "//third_party/android_deps:org_ow2_asm_asm_commons_java", + "//third_party/android_deps:org_ow2_asm_asm_java", + "//third_party/android_deps:org_ow2_asm_asm_util_java", + ] +} diff --git a/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java index b767f4f08907..48624914717d 100644 --- a/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java +++ b/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java @@ -1,4 +1,4 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. +// Copyright 2017 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java b/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java index 3d0d9cdd47dc..b97f87dada38 100644 --- a/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java +++ b/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java @@ -1,4 +1,4 @@ -// Copyright 2020 The Chromium Authors. All rights reserved. +// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
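The BUILD.gn restructuring above splits each tool into a thin java_binary entry point plus a java_library that owns the sources, which is what lets trace_event_adder_java depend on bytecode_processor_java without pulling in a wrapper target. A minimal sketch of the pattern, with illustrative names:

    java_binary("my_tool") {
      main_class = "org.chromium.tools.MyTool"
      wrapper_script_name = "helper/my_tool"
      deps = [ ":my_tool_java" ]
    }

    java_library("my_tool_java") {
      visibility = [ ":*" ]
      sources = [ "java/org/chromium/tools/MyTool.java" ]
    }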
@@ -9,6 +9,7 @@ import org.objectweb.asm.ClassWriter; import java.io.BufferedInputStream; +import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; @@ -31,16 +32,23 @@ public void rewrite(File inputJar, File outputJar) throws IOException { if (!inputJar.exists()) { throw new FileNotFoundException("Input jar not found: " + inputJar.getPath()); } - try (InputStream inputStream = new BufferedInputStream(new FileInputStream(inputJar))) { - try (OutputStream outputStream = new FileOutputStream(outputJar)) { - processZip(inputStream, outputStream); - } + + try (InputStream inputStream = new BufferedInputStream(new FileInputStream(inputJar)); + OutputStream outputStream = new FileOutputStream(outputJar)) { + processZip(inputStream, outputStream); } } /** Returns true if the class at the given path in the archive should be rewritten. */ protected abstract boolean shouldRewriteClass(String classPath); + /** + * Returns true if the class at the given {@link ClassReader} should be rewritten. + */ + protected boolean shouldRewriteClass(ClassReader classReader) { + return true; + } + /** * Returns the ClassVisitor that should be used to modify the bytecode of class at the given * path in the archive. @@ -49,21 +57,35 @@ protected abstract ClassVisitor getClassVisitorForClass( String classPath, ClassVisitor delegate); private void processZip(InputStream inputStream, OutputStream outputStream) { - try (ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream)) { - ZipInputStream zipInputStream = new ZipInputStream(inputStream); + try (ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream); + ZipInputStream zipInputStream = new ZipInputStream(inputStream)) { ZipEntry entry; while ((entry = zipInputStream.getNextEntry()) != null) { - ByteArrayOutputStream buffer = new ByteArrayOutputStream(); - boolean handled = processClassEntry(entry, zipInputStream, buffer); + // Get the uncompressed contents of the current zip entry and wrap in an input + // stream. This is done because ZipInputStreams can't be reset so they can only be + // read once, and classes that don't need rewriting need to be read twice, first to + // parse and then to copy. + byte[] currentEntryBytes = zipInputStream.readAllBytes(); + ByteArrayInputStream currentEntryInputStream = + new ByteArrayInputStream(currentEntryBytes); + ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream(); + boolean handled = processClassEntry(entry, currentEntryInputStream, outputBuffer); + + ZipEntry newEntry = new ZipEntry(entry.getName()); + newEntry.setTime(entry.getTime()); + zipOutputStream.putNextEntry(newEntry); if (handled) { - ZipEntry newEntry = new ZipEntry(entry.getName()); - zipOutputStream.putNextEntry(newEntry); - zipOutputStream.write(buffer.toByteArray(), 0, buffer.size()); + zipOutputStream.write(outputBuffer.toByteArray(), 0, outputBuffer.size()); } else { - zipOutputStream.putNextEntry(entry); - zipInputStream.transferTo(zipOutputStream); + // processClassEntry may have advanced currentEntryInputStream, so reset it to + // copy zip entry contents unmodified. 
+ currentEntryInputStream.reset(); + currentEntryInputStream.transferTo(zipOutputStream); } + zipOutputStream.closeEntry(); } + + zipOutputStream.finish(); } catch (IOException e) { throw new RuntimeException(e); } @@ -76,6 +98,9 @@ private boolean processClassEntry( } try { ClassReader reader = new ClassReader(inputStream); + if (!shouldRewriteClass(reader)) { + return false; + } ClassWriter writer = new ClassWriter(reader, ClassWriter.COMPUTE_FRAMES); ClassVisitor classVisitor = getClassVisitorForClass(entry.getName(), writer); reader.accept(classVisitor, ClassReader.EXPAND_FRAMES); diff --git a/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java index 9f45df511782..a997bf05a3d3 100644 --- a/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java +++ b/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java @@ -1,4 +1,4 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. +// Copyright 2018 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -71,6 +71,11 @@ private static void validateClass(ClassLoader classLoader, String className) // API. return; } + if (className.matches("^android\\b.*")) { + // OS APIs sometime pop up in prebuilts. Rather than force prebuilt targets to set a + // proper alternative_android_sdk_dep, just ignore android.* + return; + } try { classLoader.loadClass(className.replace('/', '.')); } catch (ClassNotFoundException e) { diff --git a/build/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java b/build/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java new file mode 100644 index 000000000000..3cf3a83d4d75 --- /dev/null +++ b/build/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java @@ -0,0 +1,104 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
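The processZip() rewrite in ByteCodeRewriter.java above exists because ZipInputStream cannot be rewound: each entry is buffered so it can be parsed once and, when no rewrite happens, reset and copied through verbatim. A self-contained sketch of that pattern (tryRewrite is a placeholder standing in for processClassEntry):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipInputStream;
    import java.util.zip.ZipOutputStream;

    class ZipEntryRecopy {
        static void process(ZipInputStream zin, ZipOutputStream zout,
                ZipEntry entry) throws IOException {
            // Buffer the uncompressed entry so it can be read twice.
            ByteArrayInputStream in = new ByteArrayInputStream(zin.readAllBytes());
            boolean handled = tryRewrite(entry, in, zout); // may consume the stream
            if (!handled) {
                in.reset(); // rewind to the start of the buffered bytes
                ZipEntry copy = new ZipEntry(entry.getName());
                copy.setTime(entry.getTime());
                zout.putNextEntry(copy);
                in.transferTo(zout);
                zout.closeEntry();
            }
        }

        // Placeholder: a real rewriter runs ASM here and writes its own entry.
        static boolean tryRewrite(ZipEntry entry, InputStream in,
                ZipOutputStream zout) throws IOException {
            return false;
        }
    }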
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ACC_ABSTRACT;
+import static org.objectweb.asm.Opcodes.ACC_INTERFACE;
+import static org.objectweb.asm.Opcodes.ALOAD;
+import static org.objectweb.asm.Opcodes.ASM7;
+import static org.objectweb.asm.Opcodes.ILOAD;
+import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
+import static org.objectweb.asm.Opcodes.IRETURN;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Type;
+
+import java.util.ArrayList;
+
+class EmptyOverrideGeneratorClassAdapter extends ClassVisitor {
+    private final ArrayList<MethodDescription> mMethodsToGenerate;
+    private String mSuperClassName;
+    private boolean mIsAbstract;
+    private boolean mIsInterface;
+
+    public EmptyOverrideGeneratorClassAdapter(
+            ClassVisitor cv, ArrayList<MethodDescription> methodsToGenerate) {
+        super(ASM7, cv);
+        mMethodsToGenerate = methodsToGenerate;
+    }
+
+    @Override
+    public void visit(int version, int access, String name, String signature, String superName,
+            String[] interfaces) {
+        super.visit(version, access, name, signature, superName, interfaces);
+
+        mSuperClassName = superName;
+        mIsAbstract = (access & ACC_ABSTRACT) == ACC_ABSTRACT;
+        mIsInterface = (access & ACC_INTERFACE) == ACC_INTERFACE;
+    }
+
+    @Override
+    public void visitEnd() {
+        if (mIsAbstract || mIsInterface || mMethodsToGenerate.isEmpty()) {
+            super.visitEnd();
+            return;
+        }
+
+        for (MethodDescription method : mMethodsToGenerate) {
+            if (!method.shouldCreateOverride) {
+                continue;
+            }
+
+            MethodVisitor mv = super.visitMethod(
+                    method.access, method.methodName, method.description, null, null);
+            writeOverrideCode(mv, method.access, method.methodName, method.description);
+        }
+
+        super.visitEnd();
+    }
+
+    /**
+     * Writes code to a method to call that method's parent implementation.
+     * <pre>
+     * {@code
+     * // Calling writeOverrideCode(mv, ACC_PUBLIC, "doFoo", "(Ljava/lang/String;)I") writes the
+     * following method body:
+     * public int doFoo(String arg){
+     *    return super.doFoo(arg);
+     * }
+     * }
+     * </pre>
+ * + * This will be rewritten later by TraceEventAdderClassAdapter to wrap the body in a trace + * event. + */ + private void writeOverrideCode( + MethodVisitor mv, final int access, final String name, final String descriptor) { + assert access != 0; + Type[] argTypes = Type.getArgumentTypes(descriptor); + Type returnType = Type.getReturnType(descriptor); + + mv.visitCode(); + + // Variable 0 contains `this`, load it into the operand stack. + mv.visitVarInsn(ALOAD, 0); + + // Variables 1..n contain all arguments, load them all into the operand stack. + int i = 1; + for (Type arg : argTypes) { + // getOpcode(ILOAD) returns the ILOAD equivalent to the current argument's type. + mv.visitVarInsn(arg.getOpcode(ILOAD), i); + i += arg.getSize(); + } + + // Call the parent class method with the same arguments. + mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, name, descriptor, false); + + // Return the result. + mv.visitInsn(returnType.getOpcode(IRETURN)); + + mv.visitMaxs(0, 0); + mv.visitEnd(); + } +} diff --git a/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java b/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java index a40f39c4ce8d..0966be0b303b 100644 --- a/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java +++ b/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java @@ -1,4 +1,4 @@ -// Copyright 2020 The Chromium Authors. All rights reserved. +// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -13,6 +13,7 @@ import java.io.File; import java.io.IOException; +import java.lang.reflect.Method; /** * Java application that modifies Fragment.getActivity() to return an Activity instead of a @@ -75,11 +76,29 @@ protected ClassVisitor getClassVisitorForClass(String classPath, ClassVisitor de * the replaced method. */ private static class InvocationReplacer extends ClassVisitor { + /** + * A ClassLoader that will resolve R classes to Object. + * + * R won't be in our classpath, and we don't access any information about them, so resolving + * it to a dummy value is fine. + */ + private static class ResourceStubbingClassLoader extends ClassLoader { + @Override + protected Class findClass(String name) throws ClassNotFoundException { + if (name.matches(".*\\.R(\\$.+)?")) { + return Object.class; + } + return super.findClass(name); + } + } + private final boolean mSingleAndroidX; + private final ClassLoader mClassLoader; private InvocationReplacer(ClassVisitor baseVisitor, boolean singleAndroidX) { super(Opcodes.ASM7, baseVisitor); mSingleAndroidX = singleAndroidX; + mClassLoader = new ResourceStubbingClassLoader(); } @Override @@ -90,6 +109,28 @@ public MethodVisitor visitMethod( @Override public void visitMethodInsn(int opcode, String owner, String name, String descriptor, boolean isInterface) { + // Change the return type of getActivity and replaceActivity. + if (isActivityGetterInvocation(opcode, owner, name, descriptor)) { + super.visitMethodInsn( + opcode, owner, name, NEW_METHOD_DESCRIPTOR, isInterface); + if (mSingleAndroidX) { + super.visitTypeInsn( + Opcodes.CHECKCAST, "androidx/fragment/app/FragmentActivity"); + } + } else if (isDowncastableFragmentActivityMethodInvocation( + opcode, owner, name, descriptor)) { + // Replace FragmentActivity.foo() with Activity.foo() to fix cases where the + // above code changed the getActivity return type. 
See the + // isDowncastableFragmentActivityMethodInvocation documentation for details. + super.visitMethodInsn( + opcode, "android/app/Activity", name, descriptor, isInterface); + } else { + super.visitMethodInsn(opcode, owner, name, descriptor, isInterface); + } + } + + private boolean isActivityGetterInvocation( + int opcode, String owner, String name, String descriptor) { boolean isFragmentGetActivity = name.equals(GET_ACTIVITY_METHOD_NAME) && descriptor.equals(OLD_METHOD_DESCRIPTOR) && isFragmentSubclass(owner); @@ -100,39 +141,63 @@ public void visitMethodInsn(int opcode, String owner, String name, name.equals(GET_LIFECYCLE_ACTIVITY_METHOD_NAME) && descriptor.equals(OLD_METHOD_DESCRIPTOR) && owner.equals(SUPPORT_LIFECYCLE_FRAGMENT_IMPL_BINARY_NAME); - if ((opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL) + return (opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL) && (isFragmentGetActivity || isFragmentRequireActivity - || isSupportLifecycleFragmentImplGetLifecycleActivity)) { - super.visitMethodInsn( - opcode, owner, name, NEW_METHOD_DESCRIPTOR, isInterface); - if (mSingleAndroidX) { - super.visitTypeInsn( - Opcodes.CHECKCAST, "androidx/fragment/app/FragmentActivity"); - } - } else { - super.visitMethodInsn(opcode, owner, name, descriptor, isInterface); - } + || isSupportLifecycleFragmentImplGetLifecycleActivity); } - private boolean isFragmentSubclass(String internalType) { - // Look up classes with a ClassLoader that will resolve any R classes to Object. - // This is fine in this case as resource classes shouldn't be in the class - // hierarchy of any Fragments. - ClassLoader resourceStubbingClassLoader = new ClassLoader() { - @Override - protected Class findClass(String name) throws ClassNotFoundException { - if (name.matches(".*\\.R(\\$.+)?")) { - return Object.class; + /** + * Returns true if the given method belongs to FragmentActivity, and also exists on + * Activity. + * + * The Java code `requireActivity().getClassLoader()` will compile to the following + * bytecode: + * aload_0 + * // Method requireActivity:()Landroid/app/Activity; + * invokevirtual #n + * // Method androidx/fragment/app/FragmentActivity.getClassLoader:()LClassLoader; + * invokevirtual #m + * + * The second invokevirtual instruction doesn't typecheck because the + * requireActivity() return type was changed from FragmentActivity to Activity. Note + * that this is only an issue when validating the bytecode on the JVM, not in + * Dalvik, so while the above code works on device, it fails in robolectric tests. + * + * To fix the example above, we'd replace the second invokevirtual call with a call + * to android/app/Activity.getClassLoader:()Ljava/lang/ClassLoader. In general, any + * call to FragmentActivity.foo, where foo also exists on Activity, will be replaced + * with a call to Activity.foo. Activity.foo will still resolve to + * FragmentActivity.foo at runtime, while typechecking in robolectric tests. + */ + private boolean isDowncastableFragmentActivityMethodInvocation( + int opcode, String owner, String name, String descriptor) { + // Return if this isn't an invoke instruction on a FragmentActivity. + if (!(opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL) + || !owner.equals("androidx/fragment/app/FragmentActivity")) { + return false; + } + try { + // Check if the method exists in Activity. 
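+                // Note: mClassLoader stubs out only R classes; framework classes
+                // such as android.app.Activity resolve normally.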
+ Class activity = mClassLoader.loadClass("android.app.Activity"); + for (Method activityMethod : activity.getMethods()) { + if (activityMethod.getName().equals(name) + && Type.getMethodDescriptor(activityMethod) + .equals(descriptor)) { + return true; } - return super.findClass(name); } - }; + return false; + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + private boolean isFragmentSubclass(String internalType) { // This doesn't use Class#isAssignableFrom to avoid us needing to load // AndroidX's Fragment class, which may not be on the classpath. try { String binaryName = Type.getObjectType(internalType).getClassName(); - Class clazz = resourceStubbingClassLoader.loadClass(binaryName); + Class clazz = mClassLoader.loadClass(binaryName); while (clazz != null) { if (clazz.getName().equals("androidx.fragment.app.Fragment")) { return true; diff --git a/build/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java b/build/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java new file mode 100644 index 000000000000..6794a77a6745 --- /dev/null +++ b/build/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java @@ -0,0 +1,144 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.bytecode; + +import static org.objectweb.asm.ClassReader.EXPAND_FRAMES; +import static org.objectweb.asm.Opcodes.ACC_ABSTRACT; +import static org.objectweb.asm.Opcodes.ACC_INTERFACE; +import static org.objectweb.asm.Opcodes.ASM7; + +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.MethodVisitor; + +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; + +/** + * This ClassVisitor verifies that a class and its methods are suitable for rewriting. + * Given a class and a list of methods it performs the following checks: + * 1. Class is subclass of a class that we want to trace. + * 2. Class is not abstract or an interface. + * + * For each method provided in {@code methodsToCheck}: + * If the class overrides the method then we can rewrite it directly. + * If the class doesn't override the method then we can generate an override with {@link + * EmptyOverrideGeneratorClassAdapter}, but first we must check if the parent method is private or + * final using {@link ParentMethodCheckerClassAdapter}. + * + * This adapter modifies the provided method list to indicate which methods should be overridden or + * skipped. 
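+ *
+ * For example (illustrative): a View subclass that declares its own draw(Canvas) has that
+ * entry marked shouldCreateOverride = false, since the existing method can be rewritten in
+ * place; a method the class does not declare is resolved by walking up the superclass chain.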
+ */ +class MethodCheckerClassAdapter extends ClassVisitor { + private static final String VIEW_CLASS_DESCRIPTOR = "android/view/View"; + private static final String ANIMATOR_UPDATE_LISTENER_CLASS_DESCRIPTOR = + "android/animation/ValueAnimator$AnimatorUpdateListener"; + private static final String ANIMATOR_LISTENER_CLASS_DESCRIPTOR = + "android/animation/Animator$AnimatorListener"; + + private final ArrayList mMethodsToCheck; + private final ClassLoader mJarClassLoader; + private String mSuperName; + + public MethodCheckerClassAdapter( + ArrayList methodsToCheck, ClassLoader jarClassLoader) { + super(ASM7); + mMethodsToCheck = methodsToCheck; + mJarClassLoader = jarClassLoader; + } + + @Override + public void visit(int version, int access, String name, String signature, String superName, + String[] interfaces) { + super.visit(version, access, name, signature, superName, interfaces); + + mSuperName = superName; + + boolean isAbstract = (access & ACC_ABSTRACT) == ACC_ABSTRACT; + boolean isInterface = (access & ACC_INTERFACE) == ACC_INTERFACE; + + if (isAbstract || isInterface || !shouldTraceClass(name)) { + mMethodsToCheck.clear(); + return; + } + } + + @Override + public MethodVisitor visitMethod( + int access, String name, String descriptor, String signature, String[] exceptions) { + if (mMethodsToCheck.isEmpty()) { + return super.visitMethod(access, name, descriptor, signature, exceptions); + } + + for (MethodDescription method : mMethodsToCheck) { + if (method.methodName.equals(name) && method.description.equals(descriptor)) { + method.shouldCreateOverride = false; + } + } + + return super.visitMethod(access, name, descriptor, signature, exceptions); + } + + @Override + public void visitEnd() { + if (mMethodsToCheck.isEmpty()) { + super.visitEnd(); + return; + } + + boolean areAnyUncheckedMethods = false; + + for (MethodDescription method : mMethodsToCheck) { + if (method.shouldCreateOverride == null) { + areAnyUncheckedMethods = true; + break; + } + } + + if (areAnyUncheckedMethods) { + checkParentClass(mSuperName, mMethodsToCheck, mJarClassLoader); + } + + super.visitEnd(); + } + + private boolean shouldTraceClass(String desc) { + Class clazz = getClass(desc); + return isClassDerivedFrom(clazz, VIEW_CLASS_DESCRIPTOR) + || isClassDerivedFrom(clazz, ANIMATOR_UPDATE_LISTENER_CLASS_DESCRIPTOR) + || isClassDerivedFrom(clazz, ANIMATOR_LISTENER_CLASS_DESCRIPTOR); + } + + private boolean isClassDerivedFrom(Class clazz, String classDescriptor) { + Class superClass = getClass(classDescriptor); + if (clazz == null || superClass == null) return false; + return superClass.isAssignableFrom(clazz); + } + + private Class getClass(String desc) { + try { + return mJarClassLoader.loadClass(desc.replace('/', '.')); + } catch (ClassNotFoundException | NoClassDefFoundError | IllegalAccessError e) { + return null; + } + } + + static void checkParentClass(String superClassName, ArrayList methodsToCheck, + ClassLoader jarClassLoader) { + try { + ClassReader cr = new ClassReader(getClassAsStream(jarClassLoader, superClassName)); + ParentMethodCheckerClassAdapter parentChecker = + new ParentMethodCheckerClassAdapter(methodsToCheck, jarClassLoader); + cr.accept(parentChecker, EXPAND_FRAMES); + } catch (IOException ex) { + // Ignore errors in case class can't be loaded. 
+ } + } + + private static InputStream getClassAsStream(ClassLoader jarClassLoader, String desc) { + return jarClassLoader.getResourceAsStream(desc.replace('.', '/') + ".class"); + } +} diff --git a/build/android/bytecode/java/org/chromium/bytecode/MethodDescription.java b/build/android/bytecode/java/org/chromium/bytecode/MethodDescription.java new file mode 100644 index 000000000000..26717c0616b2 --- /dev/null +++ b/build/android/bytecode/java/org/chromium/bytecode/MethodDescription.java @@ -0,0 +1,20 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.bytecode; + +class MethodDescription { + public final String methodName; + public final String description; + public final int access; + public Boolean shouldCreateOverride; + + public MethodDescription(String methodName, String description, int access) { + this.methodName = methodName; + this.description = description; + this.access = access; + // A null value means we haven't checked the method. + this.shouldCreateOverride = null; + } +} diff --git a/build/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java b/build/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java new file mode 100644 index 000000000000..4656c34ab5d4 --- /dev/null +++ b/build/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java @@ -0,0 +1,109 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.bytecode; + +import static org.objectweb.asm.Opcodes.ACC_FINAL; +import static org.objectweb.asm.Opcodes.ACC_PRIVATE; +import static org.objectweb.asm.Opcodes.ACC_PROTECTED; +import static org.objectweb.asm.Opcodes.ACC_PUBLIC; +import static org.objectweb.asm.Opcodes.ASM7; + +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.MethodVisitor; + +import java.util.ArrayList; + +/** + * This ClassVisitor checks if the given class overrides methods on {@code methodsToCheck}, and if + * so it determines whether they can be overridden by a child class. If at the end any unchecked + * methods remain then we recurse on the class's superclass. 
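+ *
+ * Note: package-private methods are also treated as non-overridable here, since the class
+ * being rewritten may live in a different package than the ancestor declaring the method.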
+ */ +class ParentMethodCheckerClassAdapter extends ClassVisitor { + private static final String OBJECT_CLASS_DESCRIPTOR = "java/lang/Object"; + + private final ArrayList mMethodsToCheck; + private final ClassLoader mJarClassLoader; + private String mSuperName; + private boolean mIsCheckingObjectClass; + + public ParentMethodCheckerClassAdapter( + ArrayList methodsToCheck, ClassLoader jarClassLoader) { + super(ASM7); + mMethodsToCheck = methodsToCheck; + mJarClassLoader = jarClassLoader; + } + + @Override + public void visit(int version, int access, String name, String signature, String superName, + String[] interfaces) { + super.visit(version, access, name, signature, superName, interfaces); + + if (name.equals(OBJECT_CLASS_DESCRIPTOR)) { + mIsCheckingObjectClass = true; + return; + } + + mSuperName = superName; + } + + @Override + public MethodVisitor visitMethod( + int access, String name, String descriptor, String signature, String[] exceptions) { + if (mIsCheckingObjectClass) { + return super.visitMethod(access, name, descriptor, signature, exceptions); + } + + for (MethodDescription methodToCheck : mMethodsToCheck) { + if (methodToCheck.shouldCreateOverride != null || !methodToCheck.methodName.equals(name) + || !methodToCheck.description.equals(descriptor)) { + continue; + } + + // This class contains methodToCheck. + boolean isMethodPrivate = (access & ACC_PRIVATE) == ACC_PRIVATE; + boolean isMethodFinal = (access & ACC_FINAL) == ACC_FINAL; + boolean isMethodPackagePrivate = + (access & (ACC_PUBLIC | ACC_PROTECTED | ACC_PRIVATE)) == 0; + + // If the method is private or final then don't create an override. + methodToCheck.shouldCreateOverride = + !isMethodPrivate && !isMethodFinal && !isMethodPackagePrivate; + } + + return super.visitMethod(access, name, descriptor, signature, exceptions); + } + + @Override + public void visitEnd() { + if (mIsCheckingObjectClass) { + // We support tracing methods that are defined in classes that are derived from View, + // but are not defined in View itself. If we've reached the Object class in the + // hierarchy, it means the method doesn't exist in this hierarchy, so don't override it, + // and stop looking for it. + for (MethodDescription method : mMethodsToCheck) { + if (method.shouldCreateOverride == null) { + method.shouldCreateOverride = false; + } + } + return; + } + + boolean areAnyUncheckedMethods = false; + + for (MethodDescription method : mMethodsToCheck) { + if (method.shouldCreateOverride == null) { + areAnyUncheckedMethods = true; + break; + } + } + + if (areAnyUncheckedMethods) { + MethodCheckerClassAdapter.checkParentClass( + mSuperName, mMethodsToCheck, mJarClassLoader); + } + + super.visitEnd(); + } +} diff --git a/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java b/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java new file mode 100644 index 000000000000..4a8515951f48 --- /dev/null +++ b/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java @@ -0,0 +1,109 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +package org.chromium.bytecode; + +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.Opcodes; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; + +/** + * Java application that modifies all implementations of "draw", "onMeasure" and "onLayout" on all + * {@link android.view.View} subclasses to wrap them in trace events. + */ +public class TraceEventAdder extends ByteCodeRewriter { + private final ClassLoader mClassPathJarsClassLoader; + private ArrayList mMethodsToTrace; + + public static void main(String[] args) throws IOException { + // Invoke this script using //build/android/gyp/trace_event_bytecode_rewriter.py + + if (args.length < 2) { + System.err.println("Expected arguments: <':' separated list with N input jar paths> " + + "<':' separated list with N output jar paths>"); + System.exit(1); + } + + String[] inputJars = args[0].split(":"); + String[] outputJars = args[1].split(":"); + + assert inputJars.length + == outputJars.length : "Input and output lists are not the same length. Inputs: " + + inputJars.length + " Outputs: " + outputJars.length; + + // outputJars[n] must be the same as inputJars[n] but with a suffix, validate this. + for (int i = 0; i < inputJars.length; i++) { + File inputJarPath = new File(inputJars[i]); + String inputJarFilename = inputJarPath.getName(); + File outputJarPath = new File(outputJars[i]); + + String inputFilenameNoExtension = + inputJarFilename.substring(0, inputJarFilename.lastIndexOf(".jar")); + + assert outputJarPath.getName().startsWith(inputFilenameNoExtension); + } + + ArrayList classPathJarsPaths = new ArrayList<>(); + classPathJarsPaths.addAll(Arrays.asList(inputJars)); + ClassLoader classPathJarsClassLoader = ByteCodeProcessor.loadJars(classPathJarsPaths); + + TraceEventAdder adder = new TraceEventAdder(classPathJarsClassLoader); + for (int i = 0; i < inputJars.length; i++) { + adder.rewrite(new File(inputJars[i]), new File(outputJars[i])); + } + } + + public TraceEventAdder(ClassLoader classPathJarsClassLoader) { + mClassPathJarsClassLoader = classPathJarsClassLoader; + } + + @Override + protected boolean shouldRewriteClass(String classPath) { + return true; + } + + @Override + protected boolean shouldRewriteClass(ClassReader classReader) { + mMethodsToTrace = new ArrayList<>(Arrays.asList( + // Methods on View.java + new MethodDescription( + "dispatchTouchEvent", "(Landroid/view/MotionEvent;)Z", Opcodes.ACC_PUBLIC), + new MethodDescription("draw", "(Landroid/graphics/Canvas;)V", Opcodes.ACC_PUBLIC), + new MethodDescription("onMeasure", "(II)V", Opcodes.ACC_PROTECTED), + new MethodDescription("onLayout", "(ZIIII)V", Opcodes.ACC_PROTECTED), + // Methods on RecyclerView.java in AndroidX + new MethodDescription("scrollStep", "(II[I)V", 0), + // Methods on Animator.AnimatorListener + new MethodDescription( + "onAnimationStart", "(Landroid/animation/Animator;)V", Opcodes.ACC_PUBLIC), + new MethodDescription( + "onAnimationEnd", "(Landroid/animation/Animator;)V", Opcodes.ACC_PUBLIC), + // Methods on ValueAnimator.AnimatorUpdateListener + new MethodDescription("onAnimationUpdate", "(Landroid/animation/ValueAnimator;)V", + Opcodes.ACC_PUBLIC))); + + // This adapter will modify mMethodsToTrace to indicate which methods already exist in the + // class and which ones need to be overridden. In case the class is not an Android view + // we'll clear the list and skip rewriting. 
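+        // (After this pass each MethodDescription.shouldCreateOverride is true, false, or
+        // still null if the hierarchy could not be fully resolved.)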
+ MethodCheckerClassAdapter methodChecker = + new MethodCheckerClassAdapter(mMethodsToTrace, mClassPathJarsClassLoader); + + classReader.accept(methodChecker, ClassReader.EXPAND_FRAMES); + + return !mMethodsToTrace.isEmpty(); + } + + @Override + protected ClassVisitor getClassVisitorForClass(String classPath, ClassVisitor delegate) { + ClassVisitor chain = new TraceEventAdderClassAdapter(delegate, mMethodsToTrace); + chain = new EmptyOverrideGeneratorClassAdapter(chain, mMethodsToTrace); + + return chain; + } +} diff --git a/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java b/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java new file mode 100644 index 000000000000..f2d03fbcc781 --- /dev/null +++ b/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java @@ -0,0 +1,47 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.bytecode; + +import static org.objectweb.asm.Opcodes.ASM7; + +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.MethodVisitor; + +import java.util.ArrayList; + +/** + * A ClassVisitor for adding TraceEvent.begin and TraceEvent.end methods to any methods specified in + * a list. + */ +class TraceEventAdderClassAdapter extends ClassVisitor { + private final ArrayList mMethodsToTrace; + private String mShortClassName; + + TraceEventAdderClassAdapter(ClassVisitor visitor, ArrayList methodsToTrace) { + super(ASM7, visitor); + mMethodsToTrace = methodsToTrace; + } + + @Override + public void visit(int version, int access, String name, String signature, String superName, + String[] interfaces) { + super.visit(version, access, name, signature, superName, interfaces); + mShortClassName = name.substring(name.lastIndexOf('/') + 1); + } + + @Override + public MethodVisitor visitMethod(final int access, final String name, String desc, + String signature, String[] exceptions) { + MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions); + + for (MethodDescription method : mMethodsToTrace) { + if (method.methodName.equals(name) && method.description.equals(desc)) { + return new TraceEventAdderMethodAdapter(mv, mShortClassName, name); + } + } + + return mv; + } +} diff --git a/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java b/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java new file mode 100644 index 000000000000..11f2a273c93c --- /dev/null +++ b/build/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java @@ -0,0 +1,83 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.bytecode; + +import static org.objectweb.asm.Opcodes.ASM7; +import static org.objectweb.asm.Opcodes.ATHROW; +import static org.objectweb.asm.Opcodes.INVOKESTATIC; +import static org.objectweb.asm.Opcodes.IRETURN; +import static org.objectweb.asm.Opcodes.RETURN; + +import static org.chromium.bytecode.TypeUtils.STRING; +import static org.chromium.bytecode.TypeUtils.VOID; + +import org.objectweb.asm.MethodVisitor; + +/** + * MethodVisitor that wraps all code in TraceEvent.begin and TraceEvent.end calls. TraceEvent.end + * calls are added on all returns and thrown exceptions. + * + * Example: + *
+ *   {@code
+ *      int methodToTrace(String foo) {
+ *
+ *        // Line added by rewriter:
+ *        TraceEvent.begin("ClassName.methodToTrace");
+ *
+ *        if (foo == null) {
+ *          // Line added by rewriter:
+ *          TraceEvent.end("ClassName.methodToTrace");
+ *
+ *          throw new Exception();
+ *        } else if (foo.equals("Two")) {
+ *          // Line added by rewriter:
+ *          TraceEvent.end("ClassName.methodToTrace");
+ *
+ *          return 2;
+ *        }
+ *
+ *        // Line added by rewriter:
+ *        TraceEvent.end("ClassName.methodToTrace");
+ *
+ *        return 0;
+ *      }
+ *   }
+ * 
+ * + */ +class TraceEventAdderMethodAdapter extends MethodVisitor { + private static final String TRACE_EVENT_DESCRIPTOR = "org/chromium/base/TraceEvent"; + private static final String TRACE_EVENT_SIGNATURE = TypeUtils.getMethodDescriptor(VOID, STRING); + private final String mEventName; + + public TraceEventAdderMethodAdapter( + MethodVisitor methodVisitor, String shortClassName, String methodName) { + super(ASM7, methodVisitor); + + mEventName = shortClassName + "." + methodName; + } + + @Override + public void visitCode() { + super.visitCode(); + + mv.visitLdcInsn(mEventName); + mv.visitMethodInsn( + INVOKESTATIC, TRACE_EVENT_DESCRIPTOR, "begin", TRACE_EVENT_SIGNATURE, false); + } + + @Override + public void visitInsn(int opcode) { + if ((opcode >= IRETURN && opcode <= RETURN) || opcode == ATHROW) { + mv.visitLdcInsn(mEventName); + mv.visitMethodInsn( + INVOKESTATIC, TRACE_EVENT_DESCRIPTOR, "end", TRACE_EVENT_SIGNATURE, false); + } + + mv.visitInsn(opcode); + } +} diff --git a/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java b/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java index ed2dc2dc24ba..e62a912f8779 100644 --- a/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java +++ b/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java @@ -1,4 +1,4 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. +// Copyright 2017 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/chromium_annotations.flags b/build/android/chromium_annotations.flags new file mode 100644 index 000000000000..e3f7afa3f29d --- /dev/null +++ b/build/android/chromium_annotations.flags @@ -0,0 +1,79 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Contains flags related to annotations in //build/android that can be safely +# shared with Cronet, and thus would be appropriate for third-party apps to +# include. + +# Keep all annotation related attributes that can affect runtime +-keepattributes RuntimeVisible*Annotations +-keepattributes AnnotationDefault + +# Keep the annotations, because if we don't, the ProGuard rules that use them +# will not be respected. These classes then show up in our final dex, which we +# do not want - see crbug.com/628226. +-keep @interface org.chromium.base.annotations.AccessedByNative +-keep @interface org.chromium.base.annotations.CalledByNative +-keep @interface org.chromium.base.annotations.CalledByNativeUnchecked +-keep @interface org.chromium.build.annotations.DoNotInline +-keep @interface org.chromium.build.annotations.UsedByReflection +-keep @interface org.chromium.build.annotations.IdentifierNameString + +# Keeps for class level annotations. +-keep,allowaccessmodification @org.chromium.build.annotations.UsedByReflection class ** {} + +# Keeps for method level annotations. 
+-keepclasseswithmembers,allowaccessmodification class ** {
+  @org.chromium.base.annotations.AccessedByNative <fields>;
+}
+-keepclasseswithmembers,includedescriptorclasses,allowaccessmodification class ** {
+  @org.chromium.base.annotations.CalledByNative <methods>;
+}
+-keepclasseswithmembers,includedescriptorclasses,allowaccessmodification class ** {
+  @org.chromium.base.annotations.CalledByNativeUnchecked <methods>;
+}
+-keepclasseswithmembers,allowaccessmodification class ** {
+  @org.chromium.build.annotations.UsedByReflection <methods>;
+}
+-keepclasseswithmembers,allowaccessmodification class ** {
+  @org.chromium.build.annotations.UsedByReflection <fields>;
+}
+
+# Never inline classes, methods, or fields with this annotation, but allow
+# shrinking and obfuscation.
+# Relevant to fields when they are needed to store strong references to objects
+# that are held as weak references by native code.
+-if @org.chromium.build.annotations.DoNotInline class * {
+  *** *(...);
+}
+-keep,allowobfuscation,allowaccessmodification class <1> {
+  *** <2>(...);
+}
+-keepclassmembers,allowobfuscation,allowaccessmodification class * {
+  @org.chromium.build.annotations.DoNotInline <methods>;
+}
+-keepclassmembers,allowobfuscation,allowaccessmodification class * {
+  @org.chromium.build.annotations.DoNotInline <fields>;
+}
+
+-alwaysinline class * {
+  @org.chromium.build.annotations.AlwaysInline *;
+}
+
+# Keep all logs (Log.VERBOSE = 2). R8 does not allow setting to 0.
+-maximumremovedandroidloglevel 1 class ** {
+  @org.chromium.build.annotations.DoNotStripLogs <methods>;
+}
+-maximumremovedandroidloglevel 1 @org.chromium.build.annotations.DoNotStripLogs class ** {
+  <methods>;
+}
+
+# Never merge classes horizontally or vertically with this annotation.
+# Relevant to classes being used as a key in maps or sets.
+-keep,allowaccessmodification,allowobfuscation,allowshrinking @org.chromium.build.annotations.DoNotClassMerge class *
+
+# Mark members annotated with IdentifierNameString as identifier name strings
+-identifiernamestring class * {
+  @org.chromium.build.annotations.IdentifierNameString *;
+}
diff --git a/build/android/convert_dex_profile.py b/build/android/convert_dex_profile.py
index f9fdeb6793b0..13a48edfdeb6 100755
--- a/build/android/convert_dex_profile.py
+++ b/build/android/convert_dex_profile.py
@@ -1,11 +1,12 @@
-#!/usr/bin/env vpython
+#!/usr/bin/env vpython3
 #
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
import argparse import collections +import functools import logging import re import subprocess @@ -66,7 +67,9 @@ 'double': 'D' } -class Method(object): + +@functools.total_ordering +class Method: def __init__(self, name, class_name, param_types=None, return_type=None): self.name = name self.class_name = class_name @@ -81,16 +84,23 @@ def __repr__(self): return 'Method<{}->{}({}){}>'.format(self.class_name, self.name, self.param_types or '', self.return_type or '') - def __cmp__(self, other): - return cmp((self.class_name, self.name, self.param_types, self.return_type), - (other.class_name, other.name, other.param_types, other.return_type)) + @staticmethod + def serialize(method): + return (method.class_name, method.name, method.param_types, + method.return_type) + + def __eq__(self, other): + return self.serialize(self) == self.serialize(other) + + def __lt__(self, other): + return self.serialize(self) < self.serialize(other) def __hash__(self): # only hash name and class_name since other fields may not be set yet. return hash((self.name, self.class_name)) -class Class(object): +class Class: def __init__(self, name): self.name = name self._methods = [] @@ -149,13 +159,13 @@ def FindMethodsAtLine(self, method_name, line_start, line_end=None): logging.warning('ambigous methods in dex %s at lines %s in class "%s"', found_methods, hint_lines, self.name) return found_methods - else: - logging.warning('No method named "%s" in class "%s" is ' - 'mapped to lines %s', method_name, self.name, hint_lines) - return None + logging.warning( + 'No method named "%s" in class "%s" is ' + 'mapped to lines %s', method_name, self.name, hint_lines) + return None -class Profile(object): +class Profile: def __init__(self): # {Method: set(char)} self._methods = collections.defaultdict(set) @@ -178,7 +188,7 @@ def WriteToFile(self, path): output_profile.write(line) -class ProguardMapping(object): +class ProguardMapping: def __init__(self): # {Method: set(Method)} self._method_mapping = collections.defaultdict(set) @@ -214,7 +224,8 @@ def MapTypeDescriptorList(self, type_descriptor_list): class MalformedLineException(Exception): def __init__(self, message, line_number): - super(MalformedLineException, self).__init__(message) + super().__init__(message) + self.message = message self.line_number = line_number def __str__(self): @@ -230,7 +241,8 @@ class MalformedProfileException(MalformedLineException): def _RunDexDump(dexdump_path, dex_file_path): - return subprocess.check_output([dexdump_path, dex_file_path]).splitlines() + return subprocess.check_output([dexdump_path, + dex_file_path]).decode('utf-8').splitlines() def _ReadFile(file_path): diff --git a/build/android/convert_dex_profile_tests.py b/build/android/convert_dex_profile_tests.py old mode 100644 new mode 100755 index 0ddc5ce4a152..915d26387a4b --- a/build/android/convert_dex_profile_tests.py +++ b/build/android/convert_dex_profile_tests.py @@ -1,4 +1,5 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -41,36 +42,36 @@ positions : 0x0001 line=310 0x0057 line=313 - locals : + locals : #1 : (in La;) name : '' type : '()V' positions : - locals : + locals : Virtual methods - #0 : (in La;) name : 'a' type : '(Ljava/lang/String;)I' - positions : + positions : 0x0000 line=2 0x0003 line=3 0x001b line=8 - locals : - 0x0000 - 0x0021 reg=3 this La; + locals : + 0x0000 - 0x0021 reg=3 this La; #1 : (in La;) name : 'a' type : '(Ljava/lang/Object;)I' - positions : + positions : 0x0000 line=8 0x0003 line=9 - locals : - 0x0000 - 0x0021 reg=3 this La; + locals : + 0x0000 - 0x0021 reg=3 this La; #2 : (in La;) name : 'b' type : '()La;' - positions : + positions : 0x0000 line=1 - locals : + locals : """ # pylint: disable=line-too-long @@ -109,36 +110,36 @@ positions : 0x0001 line=310 0x0057 line=313 - locals : + locals : #1 : (in La;) name : '' type : '()V' positions : - locals : + locals : Virtual methods - #0 : (in La;) name : 'a' type : '(Ljava/lang/String;)I' - positions : + positions : 0x0000 line=2 0x0003 line=3 0x001b line=8 - locals : - 0x0000 - 0x0021 reg=3 this La; + locals : + 0x0000 - 0x0021 reg=3 this La; #1 : (in La;) name : 'c' type : '(Ljava/lang/Object;)I' - positions : + positions : 0x0000 line=8 0x0003 line=9 - locals : - 0x0000 - 0x0021 reg=3 this La; + locals : + 0x0000 - 0x0021 reg=3 this La; #2 : (in La;) name : 'b' type : '()La;' - positions : + positions : 0x0000 line=1 - locals : + locals : """ # pylint: disable=line-too-long @@ -167,14 +168,14 @@ def testProcessDex(self): dex = cp.ProcessDex(DEX_DUMP.splitlines()) self.assertIsNotNone(dex['a']) - self.assertEquals(len(dex['a'].FindMethodsAtLine('', 311, 313)), 1) - self.assertEquals(len(dex['a'].FindMethodsAtLine('', 309, 315)), 1) + self.assertEqual(len(dex['a'].FindMethodsAtLine('', 311, 313)), 1) + self.assertEqual(len(dex['a'].FindMethodsAtLine('', 309, 315)), 1) clinit = dex['a'].FindMethodsAtLine('', 311, 313)[0] - self.assertEquals(clinit.name, '') - self.assertEquals(clinit.return_type, 'V') - self.assertEquals(clinit.param_types, 'Ljava/lang/String;') + self.assertEqual(clinit.name, '') + self.assertEqual(clinit.return_type, 'V') + self.assertEqual(clinit.param_types, 'Ljava/lang/String;') - self.assertEquals(len(dex['a'].FindMethodsAtLine('a', 8, None)), 2) + self.assertEqual(len(dex['a'].FindMethodsAtLine('a', 8, None)), 2) self.assertIsNone(dex['a'].FindMethodsAtLine('a', 100, None)) # pylint: disable=protected-access @@ -183,7 +184,7 @@ def testProcessProguardMapping(self): mapping, reverse = cp.ProcessProguardMapping( PROGUARD_MAPPING.splitlines(), dex) - self.assertEquals('La;', reverse.GetClassMapping('Lorg/chromium/Original;')) + self.assertEqual('La;', reverse.GetClassMapping('Lorg/chromium/Original;')) getInstance = cp.Method( 'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;') @@ -196,7 +197,7 @@ def testProcessProguardMapping(self): mapped = mapping.GetMethodMapping( cp.Method('a', 'La;', 'Ljava/lang/String;', 'I')) - self.assertEquals(len(mapped), 2) + self.assertEqual(len(mapped), 2) self.assertIn(getInstance, mapped) self.assertNotIn(subclassInit, mapped) self.assertNotIn( @@ -205,18 +206,18 @@ def testProcessProguardMapping(self): mapped = mapping.GetMethodMapping( cp.Method('a', 'La;', 'Ljava/lang/Object;', 'I')) - self.assertEquals(len(mapped), 1) + self.assertEqual(len(mapped), 1) self.assertIn(getInstance, mapped) mapped = mapping.GetMethodMapping(cp.Method('b', 'La;', '', 'La;')) - self.assertEquals(len(mapped), 1) + self.assertEqual(len(mapped), 1) self.assertIn(another, 
mapped) - for from_method, to_methods in mapping._method_mapping.iteritems(): + for from_method, to_methods in mapping._method_mapping.items(): for to_method in to_methods: self.assertIn(from_method, reverse.GetMethodMapping(to_method)) - for from_class, to_class in mapping._class_mapping.iteritems(): - self.assertEquals(from_class, reverse.GetClassMapping(to_class)) + for from_class, to_class in mapping._class_mapping.items(): + self.assertEqual(from_class, reverse.GetClassMapping(to_class)) def testProcessProfile(self): dex = cp.ProcessDex(DEX_DUMP.splitlines()) @@ -234,9 +235,9 @@ def testProcessProfile(self): self.assertIn(initialize, profile._methods) self.assertIn(another, profile._methods) - self.assertEquals(profile._methods[getInstance], set(['H', 'S', 'P'])) - self.assertEquals(profile._methods[initialize], set(['H', 'P'])) - self.assertEquals(profile._methods[another], set(['P'])) + self.assertEqual(profile._methods[getInstance], set(['H', 'S', 'P'])) + self.assertEqual(profile._methods[initialize], set(['H', 'P'])) + self.assertEqual(profile._methods[another], set(['P'])) def testEndToEnd(self): dex = cp.ProcessDex(DEX_DUMP.splitlines()) @@ -247,7 +248,7 @@ def testEndToEnd(self): profile.WriteToFile(temp.name) with open(temp.name, 'r') as f: for a, b in zip(sorted(f), sorted(UNOBFUSCATED_PROFILE.splitlines())): - self.assertEquals(a.strip(), b.strip()) + self.assertEqual(a.strip(), b.strip()) def testObfuscateProfile(self): with build_utils.TempDir() as temp_dir: @@ -269,7 +270,7 @@ def testObfuscateProfile(self): obfuscated_profile = sorted(obfuscated_file.readlines()) for a, b in zip( sorted(OBFUSCATED_PROFILE_2.splitlines()), obfuscated_profile): - self.assertEquals(a.strip(), b.strip()) + self.assertEqual(a.strip(), b.strip()) if __name__ == '__main__': diff --git a/build/android/dcheck_is_off.flags b/build/android/dcheck_is_off.flags index 78b9cc20b746..5718c27959e6 100644 --- a/build/android/dcheck_is_off.flags +++ b/build/android/dcheck_is_off.flags @@ -1,17 +1,12 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # Contains flags that are applied only when ENABLE_DCHECK=false. --checkdiscard @org.chromium.base.annotations.CheckDiscard class ** { +-checkdiscard @org.chromium.build.annotations.CheckDiscard class ** { *; } -checkdiscard class ** { - @org.chromium.base.annotations.CheckDiscard *; -} - -# Ensure @RemovableInRelease actually works. 
--checkdiscard class ** { - @org.chromium.base.annotations.RemovableInRelease *; + @org.chromium.build.annotations.CheckDiscard *; } diff --git a/build/android/devil_chromium.json b/build/android/devil_chromium.json index 0bfcfd8484ef..784406dbf16a 100644 --- a/build/android/devil_chromium.json +++ b/build/android/devil_chromium.json @@ -1,15 +1,6 @@ { "config_type": "BaseConfig", "dependencies": { - "aapt": { - "file_info": { - "linux2_x86_64": { - "local_paths": [ - "../../third_party/android_sdk/public/build-tools/27.0.3/aapt" - ] - } - } - }, "adb": { "file_info": { "linux2_x86_64": { @@ -19,15 +10,6 @@ } } }, - "android_build_tools_libc++": { - "file_info": { - "linux2_x86_64": { - "local_paths": [ - "../../third_party/android_sdk/public/build-tools/27.0.3/lib64/libc++.so" - ] - } - } - }, "android_sdk": { "file_info": { "linux2_x86_64": { @@ -37,24 +19,6 @@ } } }, - "dexdump": { - "file_info": { - "linux2_x86_64": { - "local_paths": [ - "../../third_party/android_sdk/public/build-tools/27.0.3/dexdump" - ] - } - } - }, - "split-select": { - "file_info": { - "linux2_x86_64": { - "local_paths": [ - "../../third_party/android_sdk/public/build-tools/27.0.3/split-select" - ] - } - } - }, "simpleperf": { "file_info": { "android_armeabi-v7a": { @@ -111,7 +75,7 @@ "file_info": { "default": { "local_paths": [ - "../../third_party/android_build_tools/bundletool/bundletool-all-1.4.0.jar" + "../../third_party/android_build_tools/bundletool/bundletool.jar" ] } } diff --git a/build/android/devil_chromium.py b/build/android/devil_chromium.py index 20ae1e3981f7..fbc538952d5b 100644 --- a/build/android/devil_chromium.py +++ b/build/android/devil_chromium.py @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -128,7 +128,7 @@ def _UseLocalBuildProducts(output_directory, devil_dynamic_config): for dep_config in dep_configs } } - for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.iteritems() + for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.items() } diff --git a/build/android/diff_resource_sizes.py b/build/android/diff_resource_sizes.py index eefb6cdb2091..ff21d8180498 100755 --- a/build/android/diff_resource_sizes.py +++ b/build/android/diff_resource_sizes.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python -# Copyright 2017 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Runs resource_sizes.py on two apks and outputs the diff.""" -from __future__ import print_function import argparse import json @@ -49,8 +48,8 @@ def DiffResults(chartjson, base_results, diff_results): base_results: The chartjson-formatted size results of the base APK. diff_results: The chartjson-formatted size results of the diff APK. """ - for graph_title, graph in base_results['charts'].iteritems(): - for trace_title, trace in graph.iteritems(): + for graph_title, graph in base_results['charts'].items(): + for trace_title, trace in graph.items(): perf_tests_results_helper.ReportPerfResult( chartjson, graph_title, trace_title, diff_results['charts'][graph_title][trace_title]['value'] @@ -67,8 +66,8 @@ def AddIntermediateResults(chartjson, base_results, diff_results): base_results: The chartjson-formatted size results of the base APK. diff_results: The chartjson-formatted size results of the diff APK. 
""" - for graph_title, graph in base_results['charts'].iteritems(): - for trace_title, trace in graph.iteritems(): + for graph_title, graph in base_results['charts'].items(): + for trace_title, trace in graph.items(): perf_tests_results_helper.ReportPerfResult( chartjson, graph_title + '_base_apk', trace_title, trace['value'], trace['units'], trace['improvement_direction'], @@ -76,8 +75,8 @@ def AddIntermediateResults(chartjson, base_results, diff_results): # Both base_results and diff_results should have the same charts/traces, but # loop over them separately in case they don't - for graph_title, graph in diff_results['charts'].iteritems(): - for trace_title, trace in graph.iteritems(): + for graph_title, graph in diff_results['charts'].items(): + for trace_title, trace in graph.items(): perf_tests_results_helper.ReportPerfResult( chartjson, graph_title + '_diff_apk', trace_title, trace['value'], trace['units'], trace['improvement_direction'], @@ -194,6 +193,7 @@ def main(): logging.critical('Dumping diff histograms to %s', histogram_path) with open(histogram_path, 'w') as json_file: json_file.write(histogram_result.stdout) + return 0 if __name__ == '__main__': diff --git a/build/android/docs/README.md b/build/android/docs/README.md index 6392f7dd733e..5ee0ca638f16 100644 --- a/build/android/docs/README.md +++ b/build/android/docs/README.md @@ -1,6 +1,7 @@ # Android Build Docs -* [android_app_bundles.md](android_app_bundles.md) +* [//docs/android_build_instructions.md](/docs/android_build_instructions.md) +* [//docs/android_dynamic_feature_modules.md](/docs/android_dynamic_feature_modules.md) * [build_config.md](build_config.md) * [coverage.md](coverage.md) * [java_toolchain.md](java_toolchain.md) @@ -8,6 +9,8 @@ * [lint.md](lint.md) * [life_of_a_resource.md](life_of_a_resource.md) * [../incremental_install/README.md](../incremental_install/README.md) +* [//docs/ui/android/bytecode_rewriting.md](/docs/ui/android/bytecode_rewriting.md) +* [go/doubledown](https://goto.google.com/doubledown) (Googlers only) See also: * [//build/README.md](../../README.md) diff --git a/build/android/docs/android_app_bundles.md b/build/android/docs/android_app_bundles.md deleted file mode 100644 index e71fe27f3184..000000000000 --- a/build/android/docs/android_app_bundles.md +++ /dev/null @@ -1,205 +0,0 @@ -# Introduction - -This document describes how the Chromium build system supports Android app -bundles. - -[TOC] - -# Overview of app bundles - -An Android app bundle is an alternative application distribution format for -Android applications on the Google Play Store, that allows reducing the size -of binaries sent for installation to individual devices that run on Android L -and beyond. For more information about them, see the official Android -[documentation](https://developer.android.com/guide/app-bundle/). - -For the context of this document, the most important points are: - - - Unlike a regular APK (e.g. `foo.apk`), the bundle (e.g. `foo.aab`) cannot - be installed directly on a device. - - - Instead, it must be processed into a set of installable split APKs, which - are stored inside a special zip archive (e.g. `foo.apks`). - - - The splitting can be based on various criteria: e.g. language or screen - density for resources, or cpu ABI for native code. - - - The bundle also uses the notion of dynamic features modules (DFMs) to - separate several application features. Each module has its own code, assets - and resources, and can be installed separately from the rest of the - application if needed. 
- - - The main application itself is stored in the '`base`' module (this name - cannot be changed). - - -# Declaring app bundles with GN templates - -Here's an example that shows how to declare a simple bundle that contains a -single base module, which enables language-based splits: - -```gn - - # First declare the first bundle module. The base module is the one - # that contains the main application's code, resources and assets. - android_app_bundle_module("foo_base_module") { - # Declaration are similar to android_apk here. - ... - } - - # Second, declare the bundle itself. - android_app_bundle("foo_bundle") { - # Indicate the base module to use for this bundle - base_module_target = ":foo_base_module" - - # The name of our bundle file (without any suffix). Default would - # be 'foo_bundle' otherwise. - bundle_name = "FooBundle" - - # Enable language-based splits for this bundle. Which means that - # resources and assets specific to a given language will be placed - # into their own split APK in the final .apks archive. - enable_language_splits = true - - # Proguard settings must be passed at the bundle, not module, target. - proguard_enabled = !is_java_debug - } -``` - -When generating the `foo_bundle` target with Ninja, you will end up with -the following: - - - The bundle file under `out/Release/apks/FooBundle.aab` - - - A helper script called `out/Release/bin/foo_bundle`, which can be used - to install / launch / uninstall the bundle on local devices. - - This works like an APK wrapper script (e.g. `foo_apk`). Use `--help` - to see all possible commands supported by the script. - - -# Declaring dynamic feature modules with GN templates - -Please see -[Dynamic Feature Modules](../../../docs/android_dynamic_feature_modules.md) for -more details. In short, if you need more modules besides the base one, you -will need to list all the extra ones using the extra_modules variable which -takes a list of GN scopes, as in: - -```gn - - android_app_bundle_module("foo_base_module") { - ... - } - - android_app_bundle_module("foo_extra_module") { - ... - } - - android_app_bundle("foo_bundle") { - base_module_target = ":foo_base_module" - - extra_modules = [ - { # NOTE: Scopes require one field per line, and no comma separators. - name = "my_module" - module_target = ":foo_extra_module" - } - ] - - ... - } -``` - -Note that each extra module is identified by a unique name, which cannot -be '`base`'. - - -# Bundle signature issues - -Signing an app bundle is not necessary, unless you want to upload it to the -Play Store. Since this process is very slow (it uses `jarsigner` instead of -the much faster `apkbuilder`), you can control it with the `sign_bundle` -variable, as described in the example above. - -The `.apks` archive however always contains signed split APKs. The keystore -path/password/alias being used are the default ones, unless you use custom -values when declaring the bundle itself, as in: - -```gn - android_app_bundle("foo_bundle") { - ... - keystore_path = "//path/to/keystore" - keystore_password = "K3y$t0Re-Pa$$w0rd" - keystore_name = "my-signing-key-name" - } -``` - -These values are not stored in the bundle itself, but in the wrapper script, -which will use them to generate the `.apks` archive for you. This allows you -to properly install updates on top of existing applications on any device. 
- - -# Proguard and bundles - -When using an app bundle that is made of several modules, it is crucial to -ensure that proguard, if enabled: - -- Keeps the obfuscated class names used by each module consistent. -- Does not remove classes that are not used in one module, but referenced - by others. - -To achieve this, a special scheme called *synchronized proguarding* is -performed, which consists of the following steps: - -- The list of unoptimized .jar files from all modules are sent to a single - proguard command. This generates a new temporary optimized *group* .jar file. - -- Each module extracts the optimized class files from the optimized *group* - .jar file, to generate its own, module-specific, optimized .jar. - -- Each module-specific optimized .jar is then sent to dex generation. - -This synchronized proguarding step is added by the `android_app_bundle()` GN -template. In practice this means the following: - - - `proguard_enabled` must be passed to `android_app_bundle` targets, but not - to `android_app_bundle_module` ones. - - - `proguard_configs` can be still passed to individual modules, just - like regular APKs. All proguard configs will be merged during the - synchronized proguard step. - - -# Manual generation and installation of .apks archives - -Note that the `foo_bundle` script knows how to generate the .apks archive -from the bundle file, and install it to local devices for you. For example, -to install and launch a bundle, use: - -```sh - out/Release/bin/foo_bundle run -``` - -If you want to manually look or use the `.apks` archive, use the following -command to generate it: - -```sh - out/Release/bin/foo_bundle build-bundle-apks \ - --output-apks=/tmp/BundleFoo.apks -``` - -All split APKs within the archive will be properly signed. And you will be -able to look at its content (with `unzip -l`), or install it manually with: - -```sh - build/android/gyp/bundletool.py install-apks \ - --apks=/tmp/BundleFoo.apks \ - --adb=$(which adb) -``` - -The task of examining the manifest is simplified by running the following, -which dumps the application manifest as XML to stdout: - -```sh - build/android/gyp/bundletool.py dump-manifest -``` diff --git a/build/android/docs/build_config.md b/build/android/docs/build_config.md index 06a4873c18ef..8f752a66916b 100644 --- a/build/android/docs/build_config.md +++ b/build/android/docs/build_config.md @@ -1,19 +1,19 @@ # Introduction -This document describes the `.build_config` files that are used by the +This document describes the `.build_config.json` files that are used by the Chromium build system for Android-specific targets like APK, resources, and more. [TOC] -# I. Overview of .build_config files: +# I. Overview of .build_config.json files: The Android build requires performing computations about dependencies in various targets, which are not possible with the GN build language. To address -this, `.build_config` files are written during the build to store the needed +this, `.build_config.json` files are written during the build to store the needed per-target information as JSON files. -They are always written to `$target_gen_dir/${target_name}.build_config`. +They are always written to `$target_gen_dir/${target_name}.build_config.json`. Many scripts under [`build/android/gyp/`](build/android_gyp/), which are used during the build, can also accept parameter arguments using @@ -25,7 +25,7 @@ This placeholder will ensure that `` is read as a JSON file, then return the value at `[key1][key2]...[keyN]` for the `--some-param` option. 
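
As an illustration only (a minimal sketch, not the actual helper — the real
logic lives with the shared `build_utils` utilities and handles more cases,
such as placeholders embedded inside larger argument strings), resolving such
a placeholder amounts to:

```python
import json
import re

# Matches a bare '@FileArg(path:key1:...:keyN)' argument value.
_FILE_ARG_RE = re.compile(r'^@FileArg\((.+)\)$')


def expand_file_arg(value):
  """Expands an @FileArg() placeholder into the JSON value it names."""
  match = _FILE_ARG_RE.match(value)
  if not match:
    return value  # Not a placeholder; pass the argument through unchanged.
  path, *keys = match.group(1).split(':')
  with open(path) as f:
    result = json.load(f)
  for key in keys:  # Walk down the nested JSON dictionaries.
    result = result[key]
  return result
```

For instance, `expand_file_arg('@FileArg(gen/foo.build_config.json:deps_info:name)')`
(a made-up path) would parse that JSON file and return the value at
`["deps_info"]["name"]`.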
Apart from that, the scripts do not need to know anything about the structure -of `.build_config` files (but the GN rules that invoke them do and select +of `.build_config.json` files (but the GN rules that invoke them do and select which `@FileArg()` references to use). For a concrete example, consider the following GN fragment: @@ -42,17 +42,17 @@ android_resources("ui_java_resources") { ``` This will end up generating the following JSON file under -`$CHROMIUM_OUTPUT_DIR/gen/ui/android/ui_java_resources.build_config`: +`$CHROMIUM_OUTPUT_DIR/gen/ui/android/ui_java_resources.build_config.json`: ```json { "deps_info": { "deps_configs": [ - "gen/ui/android/ui_strings_grd.build_config" + "gen/ui/android/ui_strings_grd.build_config.json" ], - "name": "ui_java_resources.build_config", + "name": "ui_java_resources.build_config.json", "package_name": "org.chromium.ui", - "path": "gen/ui/android/ui_java_resources.build_config", + "path": "gen/ui/android/ui_java_resources.build_config.json", "r_text": "gen/ui/android/ui_java_resources_R.txt", "resources_dirs": [ "../../ui/android/java/res" @@ -71,10 +71,10 @@ This will end up generating the following JSON file under } ``` -NOTE: All path values in `.build_config` files are relative to your +NOTE: All path values in `.build_config.json` files are relative to your `$CHROMIUM_OUTPUT_DIR`. -# II. Generation of .build_config files: +# II. Generation of .build_config.json files: They are generated by the GN [`write_build_config()`](gn_write_build_config) internal template, which ends up invoking @@ -85,8 +85,8 @@ is with the following parameters: python ../../build/android/gyp/write_build_config.py \ --type=android_resources \ --depfile gen/ui/android/ui_java_resources__build_config_crbug_908819.d \ - --deps-configs=\[\"gen/ui/android/ui_strings_grd.build_config\"\] \ - --build-config gen/ui/android/ui_java_resources.build_config \ + --deps-configs=\[\"gen/ui/android/ui_strings_grd.build_config.json\"\] \ + --build-config gen/ui/android/ui_java_resources.build_config.json \ --resources-zip resource_zips/ui/android/ui_java_resources.resources.zip \ --package-name org.chromium.ui \ --r-text gen/ui/android/ui_java_resources_R.txt \ @@ -99,10 +99,10 @@ parameters, but not all of it. In particular, the `resources['dependency_zips']` entry was computed by inspecting the content of all dependencies (here, only -`ui_string_grd.build_config`), and collecting their +`ui_string_grd.build_config.json`), and collecting their `deps_configs['resources_zip']` values. -Because a target's `.build_config` file will always be generated after +Because a target's `.build_config.json` file will always be generated after that of all of its dependencies, [`write_build_config.py`](write_build_config_py) can traverse the whole (transitive) set of direct *and* indirect dependencies for a given target @@ -112,10 +112,10 @@ This is the kind of processing that cannot be done at the GN language level, and is very powerful for Android builds. -# III. Usage of .build_config files: +# III. Usage of .build_config.json files: In addition to being parsed by `write_build_config.py`, when they are listed -in the `--deps-configs` of a given target, the `.build_config` files are used +in the `--deps-configs` of a given target, the `.build_config.json` files are used by other scripts under [build/android/gyp/] to build stuff. 
For example, the GN `android_resources` template uses it to invoke the @@ -127,8 +127,8 @@ python ../../build/android/gyp/process_resources.py \ --depfile gen/ui/android/ui_java_resources_1.d \ --android-sdk-jar ../../third_party/android_sdk/public/platforms/android-29/android.jar \ --aapt-path ../../third_party/android_sdk/public/build-tools/29.0.2/aapt \ - --dependencies-res-zips=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:dependency_zips\) \ - --extra-res-packages=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:extra_package_names\) \ + --dependencies-res-zips=@FileArg\(gen/ui/android/ui_java_resources.build_config.json:resources:dependency_zips\) \ + --extra-res-packages=@FileArg\(gen/ui/android/ui_java_resources.build_config.json:resources:extra_package_names\) \ --resource-dirs=\[\"../../ui/android/java/res\"\] \ --debuggable \ --resource-zip-out resource_zips/ui/android/ui_java_resources.resources.zip \ @@ -143,11 +143,11 @@ Note the use of `@FileArg()` references here, to tell the script where to find the information it needs. -# IV. Format of .build_config files: +# IV. Format of .build_config.json files: Thanks to `@FileArg()` references, Python build scripts under [`build/android/gyp/`](build/android/gyp/) do not need to know anything -about the internal format of `.build_config` files. +about the internal format of `.build_config.json` files. This format is decided between internal GN build rules and [`write_build_config.py`][write_build_config_py]. Since these changes rather @@ -155,7 +155,7 @@ often, the format documentation is kept inside the Python script itself, but can be extracted as a Markdown file and visualized with the following commands: ```sh -# Extract .build_config format documentation +# Extract .build_config.json format documentation build/android/gyp/write_build_config.py \ --generate-markdown-format-doc > /tmp/format.md diff --git a/build/android/docs/class_verification_failures.md b/build/android/docs/class_verification_failures.md index e3e474539e69..ab9a24135749 100644 --- a/build/android/docs/class_verification_failures.md +++ b/build/android/docs/class_verification_failures.md @@ -2,6 +2,13 @@ [TOC] +## This document is obsolete + +While class verification failures still exist, our Java optimizer, R8, has +solved this problem for us. Developers should not have to worry about this +problem unless there is a bug in R8. See [this bug](http://b/138781768) for where +they implemented this solution for us. + ## What's this all about? This document aims to explain class verification on Android, how this can affect @@ -82,6 +89,9 @@ selectively to optimize space. ## Chromium's solution +**Note:** This section is no longer relevant as R8 has fixed this for us. We intend +to remove these ApiHelperFor classes - see [this bug](https://crbug.com/1302156). + In Chromium, we try to avoid doing class verification at runtime by manually out-of-lining all Android API usage like so: @@ -127,8 +137,7 @@ look as follows: * These need to exist in a separate class so that Android framework can successfully verify * classes without encountering the new APIs. 
 */
-@VerifiesOnOMR1
-@TargetApi(Build.VERSION_CODES.O_MR1)
+@RequiresApi(Build.VERSION_CODES.O_MR1)
public class ApiHelperForOMR1 {
    private ApiHelperForOMR1() {}

@@ -136,15 +145,14 @@ public class ApiHelperForOMR1 {
}
```

-* `@VerifiesOnO_MR1`: this is a chromium-defined annotation to tell proguard
-  (and similar tools) not to inline this class or its methods (since that would
-  defeat the point of out-of-lining!)
-* `@TargetApi(Build.VERSION_CODES.O_MR1)`: this tells Android Lint it's OK to
+* `@RequiresApi(Build.VERSION_CODES.O_MR1)`: this tells Android Lint it's OK to
  use OMR1 APIs since this class is only used on OMR1 and above. Substitute
  `O_MR1` for the [appropriate constant][4], depending on when the APIs were
  introduced.
* Don't put any `SDK_INT` checks inside this class, because it must only be
  called on >= OMR1.
+* R8 is smart enough not to inline methods where doing so would introduce
+  verification failures (b/138781768).

### Out-of-lining if your method has a new type in its signature

@@ -174,7 +182,7 @@ public class FooBar {
}

 @VerifiesOnP
-@TargetApi(Build.VERSION_CODES.P)
+@RequiresApi(Build.VERSION_CODES.P)
public class ApiHelperForP {
    public static NewTypeInAndroidP getNewTypeInAndroidP() {
        return new NewTypeInAndroidP();
diff --git a/build/android/docs/coverage.md b/build/android/docs/coverage.md
index ed241006b86e..2f362f687acf 100644
--- a/build/android/docs/coverage.md
+++ b/build/android/docs/coverage.md
@@ -1,7 +1,7 @@
# Android code coverage instructions

These are instructions for collecting code coverage data for android
-instrumentation and JUnit tests.
+instrumentation and JUnit tests. For Clang (C++) code coverage, refer to [clang coverage].

[TOC]

@@ -9,7 +9,7 @@ instrumentation and JUnit tests.

In order to use JaCoCo code coverage, we need to create build time pre-instrumented
class files and runtime **.exec** files. Then we need to process them using the
-**build/android/generate_jacoco_report.py** script.
+[build/android/generate_jacoco_report.py](https://source.chromium.org/chromium/chromium/src/+/main:build/android/generate_jacoco_report.py) script.

## How to collect coverage data

@@ -75,3 +75,15 @@ class files and runtime **.exec** files. Then we need to process them using the
     --coverage-dir /tmp/coverage/ \
     --sources-json-dir out/Debug/ \
   ```
+3. If generating coverage and there are duplicate class files, as can happen
+   when generating coverage for downstream targets, use the
+   `--include-substr-filter` option to choose jars in the desired directory.
+   E.g., to generate a coverage report for the Clank internal repo:
+   ```shell
+   build/android/generate_jacoco_report.py --format html \
+     --output-dir /tmp/coverage_report/ --coverage-dir /tmp/coverage/ \
+     --sources-json-dir out/java_coverage/ \
+     --include-substr-filter obj/clank
+   ```
+
+[clang coverage]: https://chromium.googlesource.com/chromium/src/+/HEAD/docs/testing/code_coverage.md
\ No newline at end of file
diff --git a/build/android/docs/java_asserts.md b/build/android/docs/java_asserts.md
new file mode 100644
index 000000000000..37d94c1e3a3a
--- /dev/null
+++ b/build/android/docs/java_asserts.md
@@ -0,0 +1,80 @@
+# Java Asserts in Chromium
+This doc exists to explain how asserts in Java are enabled and disabled by
+Chromium's build system.
+
+## javac Assertion Bytecode
+Whenever javac compiles a Java class, assertions are transformed into the
+following bytecode:
+
+```
+ Code:
+ 0: getstatic #2   // Static field $assertionsDisabled
+ 3: ifne 20        // Conditional jump past assertion throw
+ 12: new #3        // Class java/lang/AssertionError
+ 19: athrow        // Throwing AssertionError
+ 20: return
+
+// NOTE: this static block was made just to check the desiredAssertionStatus.
+// There was no static block on the class before javac created one.
+ static {};
+ Code:
+ 2: invokevirtual #6 // Method java/lang/Class.desiredAssertionStatus()
+ 5: ifne 12
+ 8: iconst_1
+ 9: goto 13
+ 12: iconst_0
+ 13: putstatic #2   // Static field $assertionsDisabled
+ 16: return
+```
+
+TL;DR - every single assertion is gated behind an `$assertionsDisabled` flag
+check, which is a static field that can be set by the JRE's `ClassLoader`
+methods `setDefaultAssertionStatus`, `setPackageAssertionStatus`, and
+`setClassAssertionStatus`.
+
+## Assertion Enabling/Disabling
+Our tools which consume javac output, namely R8 and D8, each have flags which
+the build system uses to enable or disable asserts. We control this with the
+`enable_java_asserts` gn arg. When enabling, the tools delete the gating check
+on `$assertionsDisabled`; when disabling, they eliminate any reference to the
+assert.
+
+```java
+// Example equivalents of:
+a = foo();
+assert a != 0;
+return a;
+
+// Traditional, unoptimized javac output.
+a = foo();
+if (!$assertionsDisabled && a == 0) {
+  throw new AssertionError();
+}
+return a;
+
+// Optimized with assertions enabled.
+a = foo();
+if (a == 0) {
+  throw new AssertionError();
+}
+return a;
+
+// Optimized with assertions disabled.
+a = foo();
+return a;
+```
+
+## Assertion Enabling on Canary
+Recently we [enabled
+asserts](https://chromium-review.googlesource.com/c/chromium/src/+/3307087) on
+Canary. It spiked our crash rate, and we decided not to do this again, as
+it's a bad user experience to crash the app incessantly for non-fatal issues.
+
+So, we asked the R8 team for a feature which would rewrite the bytecode of
+these assertions, which they implemented for us. Now, instead of just throwing
+an `AssertionError`, [R8 calls a provided assertion
+handler](https://r8.googlesource.com/r8/+/aefe7bc18a7ce19f3e9c6dac0bedf6d182bbe142/src/main/java/com/android/tools/r8/ParseFlagInfoImpl.java#124)
+with the `AssertionError`. We then wrote a [silent assertion
+reporter](https://chromium-review.googlesource.com/c/chromium/src/+/3746261)
+that reports Java `AssertionError`s to our crash server without crashing
+the browser.
diff --git a/build/android/docs/java_optimization.md b/build/android/docs/java_optimization.md
index 0ba0d5035859..da10222a4459 100644
--- a/build/android/docs/java_optimization.md
+++ b/build/android/docs/java_optimization.md
@@ -84,7 +84,7 @@ intended only for debug builds, or generated JNI classes that are meant to be
zero-overhead abstractions. Annotating a class with [@CheckDiscard][checkdiscard]
will add a `-checkdiscard` rule automatically.

-[checkdiscard]: /base/android/java/src/org/chromium/base/annotations/CheckDiscard.java
+[checkdiscard]: /build/android/java/src/org/chromium/build/annotations/CheckDiscard.java

```
Item void org.chromium.base.library_loader.LibraryPrefetcherJni.<init>() was not discarded.
diff --git a/build/android/docs/java_toolchain.md b/build/android/docs/java_toolchain.md
index ef11548eb49a..a9d229d21ad4 100644
--- a/build/android/docs/java_toolchain.md
+++ b/build/android/docs/java_toolchain.md
@@ -30,20 +30,23 @@ Most targets produce two separate `.jar` files:

### Step 1: Create interface .jar with turbine or ijar

-For prebuilt `.jar` files, use [//third_party/ijar] to create interface `.jar`
-from prebuilt `.jar`.
-
-For non-prebuilt targets, use [//third_party/turbine] to create interface `.jar`
-from `.java` source files. Turbine is much faster than javac, and so enables
-full compilation to happen more concurrently.
-
What are interface jars?:

-* The contain `.class` files with all non-public symbols and function bodies
+* They contain `.class` files with all private symbols and all method bodies
  removed.
* Dependent targets use interface `.jar` files to skip having to be rebuilt
  when only private implementation details change.

+For prebuilt `.jar` files: we use [//third_party/ijar] to create interface
+`.jar` files from the prebuilt ones.
+
+For non-prebuilt `.jar` files: we use [//third_party/turbine] to create
+interface `.jar` files directly from `.java` source files. Turbine is faster
+than javac because it does not compile method bodies. Although Turbine causes
+us to compile files twice, it speeds up builds by allowing `javac` compilation
+of targets to happen concurrently with their dependencies. We also use Turbine
+to run our annotation processors.
+
[//third_party/ijar]: /third_party/ijar/README.chromium
[//third_party/turbine]: /third_party/turbine/README.chromium

@@ -223,7 +226,7 @@ We use several tools for static analysis.

* Runs as part of normal compilation. Controlled by GN arg: `use_errorprone_java_compiler`.
* Most useful check:
  * Enforcement of `@GuardedBy` annotations.
-* List of enabled / disabled checks exists [within javac.py](https://cs.chromium.org/chromium/src/build/android/gyp/javac.py?l=30)
+* List of enabled / disabled checks exists [within compile_java.py](https://cs.chromium.org/chromium/src/build/android/gyp/compile_java.py?l=30)
* Many checks are currently disabled because there is work involved in fixing
  violations they introduce. Please help!
* Custom checks for Chrome:
@@ -253,6 +256,8 @@ We use several tools for static analysis.

  * In other words: Enforces that targets do not rely on indirect dependencies
    to populate their classpath.
* Checks run on the entire codebase, not only on changed lines.
+* This is the only static analysis that runs on prebuilt `.jar` files.
+* The same tool is also used for [bytecode rewriting](/docs/ui/android/bytecode_rewriting.md).

### [PRESUBMIT.py](/PRESUBMIT.py):

* Checks for banned patterns via `_BANNED_JAVA_FUNCTIONS`.
diff --git a/build/android/docs/life_of_a_resource.md b/build/android/docs/life_of_a_resource.md
index 3aacd5e0c98c..5e46ef66af27 100644
--- a/build/android/docs/life_of_a_resource.md
+++ b/build/android/docs/life_of_a_resource.md
@@ -12,21 +12,21 @@ which are [processed differently][native resources].
[native resources]: https://www.chromium.org/developers/tools-we-use-in-chromium/grit/grit-users-guide

The steps consume the following files as inputs:
-* AndroidManifest.xml
-  * Including AndroidManifest.xml files from libraries, which get merged
+* `AndroidManifest.xml`
+  * Including `AndroidManifest.xml` files from libraries, which get merged
    together
* res/ directories

The steps produce the following intermediate files:
-* R.srcjar (contains R.java files)
-* R.txt
-* .resources.zip
+* `R.srcjar` (contains `R.java` files)
+* `R.txt`
+* `.resources.zip`

-The steps produce the following files within an .apk:
-* AndroidManifest.xml (a binary xml file)
-* resources.arsc (contains all values and configuration metadata)
-* res/** (drawables and layouts)
-* classes.dex (just a small portion of classes from generated R.java files)
+The steps produce the following files within an `.apk`:
+* `AndroidManifest.xml` (a binary xml file)
+* `resources.arsc` (contains all values and configuration metadata)
+* `res/**` (drawables and layouts)
+* `classes.dex` (just a small portion of classes from generated `R.java` files)

## The Build Steps

@@ -38,56 +38,78 @@ following steps:

Inputs:
* GN target metadata
-* Other .build_config files
+* Other `.build_config.json` files

Outputs:
-* Target-specific .build_config file
+* Target-specific `.build_config.json` file

-write_build_config.py is run to record target metadata needed by future steps.
+`write_build_config.py` is run to record target metadata needed by future steps.
For more details, see [build_config.md](build_config.md).

### 2. Prepares resources:

Inputs:
-* Target-specific build\_config file
-* Target-specific Resource dirs (res/ directories)
-* resources.zip files from dependencies (used to generate the R.txt/java files)
+* Target-specific `.build_config.json` file
+* Files listed as `sources`

Outputs:
-* Target-specific resources.zip (containing only resources in the
-  target-specific resource dirs, no dependant resources here).
-* Target-specific R.txt
-  * Contains a list of resources and their ids (including of dependencies).
-* Target-specific R.java .srcjar
-  * See [What are R.java files and how are they generated](
-    #how-r_java-files-are-generated)
+* Target-specific `resources.zip` (contains all resources listed in `sources`).
+* Target-specific `R.txt` (list of all resources, including dependencies).

-prepare\_resources.py zips up the target-specific resource dirs and generates
-R.txt and R.java .srcjars. No optimizations, crunching, etc are done on the
-resources.
+`prepare_resources.py` zips up the target-specific resource files and generates
+`R.txt`. No optimizations, crunching, etc. are done on the resources.

-**The following steps apply only to apk targets (not library targets).**
+**The following steps apply only to apk & bundle targets (not to library
+targets).**

-### 3. Finalizes apk resources:
+### 3. Creates target-specific R.java files:

Inputs:
-* Target-specific build\_config file
-* Dependencies' resources.zip files
+* `R.txt` from dependencies.
+
+Outputs:
+* Target-specific (placeholder) `R.java` file.
+
+A target-specific `R.java` is generated for each `android_library()` target that
+sets `resources_package`. Resource IDs are not known at this phase, so all
+values are set as placeholders. These placeholder `R` classes are discarded and
+replaced with new copies in step 4.
+
+Example placeholder R.java file:
+```java
+package org.chromium.mypackage;
+
+public final class R {
+    public static class anim {
+        public static int abc_fade_in = 0;
+        public static int abc_fade_out = 0;
+        ...
+    }
+    ...
+}
+```
+
+### 4. Finalizes apk resources:
+
+Inputs:
+* Target-specific `.build_config.json` file
+* Dependencies' `R.txt` files
+* Dependencies' `resources.zip` files

Output:
-* Packaged resources zip (named foo.ap_) containing:
-  * AndroidManifest.xml (as binary xml)
-  * resources.arsc
-  * res/**
-* Final R.txt
+* Packaged `resources zip` (named `foo.ap_`) containing:
+  * `AndroidManifest.xml` (as binary xml)
+  * `resources.arsc`
+  * `res/**`
+* Final `R.txt`
  * Contains a list of resources and their ids (including of dependencies).
-* Final R.java .srcjar
-  * See [What are R.java files and how are they generated](
+* Final `R.java` files
+  * See [What are `R.java` files and how are they generated](
    #how-r_java-files-are-generated)

-#### 3(a). Compiles resources:
+#### 4(a). Compiles resources:

For each library / resources target your apk depends on, the following happens:
* Use a regex (defined in the apk target) to remove select resources (optional).
@@ -102,27 +124,34 @@ For each library / resources target your apk depends on, the following happens:
  dependency).

-#### 3(b). Links resources:
+#### 4(b). Links resources:

-After each dependency is compiled into an intermediate .zip, all those zips are
-linked by the aapt2 link command which does the following:
+After each dependency is compiled into an intermediate `.zip`, all those zips
+are linked by the `aapt2 link` command which does the following:
* Use the order of dependencies supplied so that some resources clobber
  each other.
-* Compile the AndroidManifest.xml to binary xml (references to resources are now
-  using ids rather than the string names)
-* Create a resources.arsc file that has the name and values of string
+* Compile the `AndroidManifest.xml` to binary xml (references to resources are
+  now using ids rather than the string names)
+* Create a `resources.arsc` file that has the name and values of string
  resources as well as the name and path of non-string resources (i.e. layouts
  and drawables).
* Combine the compiled resources into one packaged resources apk (a zip file
-  with an .ap\_ extension) that has all the resources related files.
+  with an `.ap_` extension) that has all the resources related files.

-#### 3(c). Optimizes resources:
+#### 4(c). Optimizes resources:

-This step obfuscates / strips resources names from the resources.arsc so that
-they can be looked up only by their numeric ids (assigned in the compile
-resources step). Access to resources via `Resources.getIdentifier()` no longer
-work unless resources are [allowlisted](#adding-resources-to-the-allowlist).
+Targets can opt into the following optimizations:
+1) Resource name collapsing: Maps all resources to the same name. Access to
+   resources via `Resources.getIdentifier()` no longer works unless resources are
+   [allowlisted](#adding-resources-to-the-allowlist).
+2) Resource filename obfuscation: Renames resource file paths, e.g. from
+   `res/drawable/something.png` to `res/a`. Rename mapping is stored alongside
+   APKs / bundles in a `.pathmap` file. Renames are based on hashes, and so are
+   stable between builds (unless a new hash collision occurs).
+3) Unused resource removal: Referenced resources are extracted from the
+   optimized `.dex` and `AndroidManifest.xml`. Resources that are directly or
+   indirectly used by these files are removed.

## App Bundles and Modules:

@@ -184,9 +213,9 @@ The first two bytes of a resource id is the package id. For regular apks, this

is `0x7f`. However, Webview is a shared library which gets loaded into other
apks. The package id for webview resources is assigned dynamically at runtime.
When webview is loaded it calls this [R file's][Base Module R.java File]
-onResourcesLoaded function to have the correct package id. When deobfuscating
-webview resource ids, disregard the first two bytes in the id when looking it up
-in the `R.txt` file.
+`onResourcesLoaded()` function so that its resources get the correct package
+id. When deobfuscating webview resource ids, disregard the first two bytes in
+the id when looking it up in the `R.txt` file.

Monochrome, when loaded as webview, rewrites the package ids of resources used
by the webview portion to the correct value at runtime; otherwise, its resources
@@ -196,15 +225,20 @@ have package id `0x7f` when run as a regular apk.

## How R.java files are generated

-R.java is a list of static classes, each with multiple static fields containing
-ids. These ids are used in java code to reference resources in the apk.
+`R.java` files contain a set of nested static classes, each with static fields
+containing ids. These ids are used in java code to reference resources in
+the apk.

-There are three types of R.java files in Chrome.
-1. Base Module Root R.java Files
-2. DFM Root R.java Files
-3. Source R.java Files
+There are three types of `R.java` files in Chrome.
+1. Root / Base Module `R.java` Files
+2. DFM `R.java` Files
+3. Per-Library `R.java` Files

-Example Base Module Root R.java File
+### Root / Base Module `R.java` Files
+Contain base android resources. All `R.java` files can access base module
+resources through inheritance.
+
+Example Root / Base Module `R.java` File:
```java
package gen.base_module;

@@ -219,10 +253,12 @@ public final class R {
    }
}
```
-Base module root R.java files contain base android resources. All R.java files
-can access base module resources through inheritance.

-Example DFM Root R.java File
+### DFM `R.java` Files
+Extend base module root `R.java` files. This allows DFMs to access their own
+resources as well as the base module's resources.
+
+Example DFM `R.java` File:
```java
package gen.vr_module;

@@ -234,27 +270,20 @@ public final class R {
    }
}
```
-DFM root R.java files extend base module root R.java files. This allows DFMs to
-access their own resources as well as the base module's resources.

-Example Source R.java File
+### Per-Library `R.java` Files
+Generated for each `android_library()` target that sets `resources_package`.
+First a placeholder copy is generated in the `android_library()` step, and then
+a final copy is created during finalization.
+
+Example final per-library `R.java`:
```java
package org.chromium.chrome.vr;

public final class R {
    public static final class anim extends
-        gen.base_module.R.anim {}
+        gen.vr_module.R.anim {}
    public static final class animator extends
-        gen.base_module.R.animator {}
+        gen.vr_module.R.animator {}
}
```
-Source R.java files extend root R.java files and have no resources of their own.
-Developers can import these R.java files to access resources in the apk.
-
-The R.java file generated via the prepare resources step above has temporary ids
-which are not marked `final`. That R.java file is only used so that javac can
-compile the java code that references R.*.
-
-The R.java generated during the finalize apk resources step has
-permanent ids. These ids are marked as `final` (except webview resources that
-need to be [rewritten at runtime](#webview-resource-ids)).
diff --git a/build/android/docs/lint.md b/build/android/docs/lint.md
index 67e2f8bf3eac..e97fd76f4caf 100644
--- a/build/android/docs/lint.md
+++ b/build/android/docs/lint.md
@@ -115,26 +115,18 @@ they are generated files, they should **not** be used to suppress lint
warnings. One of the approaches above should be used instead. Eventually all
the errors in baseline files should be either fixed or ignored permanently.

-The following are some common scenarios where you may need to update baseline
-files.
-
-### I updated `cmdline-tools` and now there are tons of new errors!
-
-This happens every time lint is updated, since lint is provided by
-`cmdline-tools`.
+Most devs do not need to update baseline files and should not need the script
+below. Occasionally, when making large build configuration changes (e.g.
+increasing `min_sdk_version`), it may be necessary to update baseline files.

Baseline files are defined via the `lint_baseline_file` gn variable. It is
-usually defined near a target's `enable_lint` gn variable. To regenerate the
-baseline file, delete it and re-run the lint target. The command will fail, but
-the baseline file will have been generated.
-
-This may need to be repeated for all targets that have set `enable_lint = true`,
-including downstream targets. Downstream baseline files should be updated and
-first to avoid build breakages. Each target has its own `lint_baseline_file`
-defined and so all these files can be removed and regenerated as needed.
+usually defined near a target's `enable_lint` gn variable. To regenerate all
+baseline files, run:

-### I updated `library X` and now there are tons of new errors!
+```
+$ third_party/android_build_tools/lint/rebuild_baselines.py
+```

-This is usually because `library X`'s aar contains custom lint checks and/or
-custom annotation definition. Follow the same procedure as updates to
-`cmdline-tools`.
+This script will also update baseline files in downstream //clank if needed.
+Since downstream and upstream use separate lint binaries, it is usually safe
+to simply land the update CLs in any order.
\ No newline at end of file
diff --git a/build/android/download_doclava.py b/build/android/download_doclava.py
index 1982fdb8469d..04db084fcb98 100755
--- a/build/android/download_doclava.py
+++ b/build/android/download_doclava.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/android/dump_apk_resource_strings.py b/build/android/dump_apk_resource_strings.py
index 8417e29f0f9d..962103e01938 100755
--- a/build/android/dump_apk_resource_strings.py
+++ b/build/android/dump_apk_resource_strings.py
@@ -1,12 +1,11 @@
-#!/usr/bin/env vpython
+#!/usr/bin/env vpython3
# encoding: utf-8
-# Copyright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A script to parse and dump localized strings in resource.arsc files.""" -from __future__ import print_function import argparse import collections @@ -92,7 +91,7 @@ def AutoIndentStringList(lines, indentation=2): # pylint: disable=line-too-long # NOTE: aapt dump will quote the following characters only: \n, \ and " -# see https://android.googlesource.com/platform/frameworks/base/+/master/libs/androidfw/ResourceTypes.cpp#7270 +# see https://cs.android.com/search?q=f:ResourceTypes.cpp # pylint: enable=line-too-long @@ -122,7 +121,7 @@ def UnquoteString(s): while pos + count < size and s[pos + count] == '\\': count += 1 - result += '\\' * (count / 2) + result += '\\' * (count // 2) start = pos + count if count & 1: if start < size: @@ -185,7 +184,7 @@ def ReadStringMapFromRTxt(r_txt_path): return result -class ResourceStringValues(object): +class ResourceStringValues: """Models all possible values for a named string.""" def __init__(self): @@ -219,8 +218,8 @@ def AddValue(self, res_name, res_config, res_value): def ToStringList(self, res_id): """Convert entry to string list for human-friendly output.""" - values = sorted( - [(str(config), value) for config, value in self.res_values.iteritems()]) + values = sorted([(str(config), value) + for config, value in self.res_values.items()]) if res_id is None: # res_id will be None when the resource ID should not be part # of the output. @@ -236,7 +235,7 @@ def ToStringList(self, res_id): return result -class ResourceStringMap(object): +class ResourceStringMap: """Convenience class to hold the set of all localized strings in a table. Usage is the following: @@ -256,7 +255,7 @@ def AddValue(self, res_id, res_name, res_config, res_value): def RemapResourceNames(self, id_name_map): """Rename all entries according to a given {res_id -> res_name} map.""" - for res_id, res_name in id_name_map.iteritems(): + for res_id, res_name in id_name_map.items(): if res_id in self._res_map: self._res_map[res_id].res_name = res_name @@ -278,15 +277,10 @@ def ToStringList(self, omit_ids=False): result = ['Resource strings (count=%d) {' % len(self._res_map)] res_map = self._res_map - # A small function to compare two (res_id, values) tuples - # by resource name first, then resource ID. - def cmp_id_name(a, b): - result = cmp(a[1].res_name, b[1].res_name) - if result == 0: - result = cmp(a[0], b[0]) - return result - - for res_id, _ in sorted(res_map.iteritems(), cmp=cmp_id_name): + # Compare two (res_id, values) tuples by resource name first, then resource + # ID. + for res_id, _ in sorted(res_map.items(), + key=lambda x: (x[1].res_name, x[0])): result += res_map[res_id].ToStringList(None if omit_ids else res_id) result.append('} # Resource strings') return result @@ -386,7 +380,7 @@ def IsFilePathABundle(input_file): _RE_BUNDLE_STRING_LOCALIZED_VALUE = re.compile( r'^\s+locale: "([0-9a-zA-Z-]+)" - \[STR\] "(.*)"$') assert _RE_BUNDLE_STRING_LOCALIZED_VALUE.match( - u' locale: "ar" - [STR] "گزینه\u200cهای بیشتر"'.encode('utf-8')) + ' locale: "ar" - [STR] "گزینه\u200cهای بیشتر"') def ParseBundleResources(bundle_tool_jar_path, bundle_path): @@ -537,11 +531,15 @@ def ParseApkResources(aapt_path, apk_path): res_map = ResourceStringMap() current_locale = None - current_resource_id = None + current_resource_id = -1 # represents undefined. 
current_resource_name = None need_value = False while True: - line = p.stdout.readline().rstrip() + try: + line = p.stdout.readline().rstrip().decode('utf8') + except UnicodeDecodeError: + continue + if not line: break m = _RE_AAPT_CONFIG.match(line) diff --git a/build/android/emma_coverage_stats.py b/build/android/emma_coverage_stats.py deleted file mode 100755 index f45f4d4d03b7..000000000000 --- a/build/android/emma_coverage_stats.py +++ /dev/null @@ -1,479 +0,0 @@ -#!/usr/bin/env vpython -# Copyright 2015 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Generates incremental code coverage reports for Java code in Chromium. - -Usage: - - build/android/emma_coverage_stats.py -v --out --emma-dir - --lines-for-coverage-file - - - Creates a JSON representation of the overall and file coverage stats and saves - this information to the specified output file. -""" - -import argparse -import collections -import json -import logging -import os -import re -import sys -from xml.etree import ElementTree - -import devil_chromium -from devil.utils import run_tests_helper - -NOT_EXECUTABLE = -1 -NOT_COVERED = 0 -COVERED = 1 -PARTIALLY_COVERED = 2 - -# Coverage information about a single line of code. -LineCoverage = collections.namedtuple( - 'LineCoverage', - ['lineno', 'source', 'covered_status', 'fractional_line_coverage']) - - -class _EmmaHtmlParser(object): - """Encapsulates HTML file parsing operations. - - This class contains all operations related to parsing HTML files that were - produced using the EMMA code coverage tool. - - Example HTML: - - Package links: - org.chromium.chrome - This is returned by the selector |XPATH_SELECT_PACKAGE_ELEMENTS|. - - Class links: - DoActivity.java - This is returned by the selector |XPATH_SELECT_CLASS_ELEMENTS|. - - Line coverage data: - - 108 - - if (index < 0 || index = mSelectors.size()) index = 0; - - - 109 - - - - 110 - if (mSelectors.get(index) != null) { - - - 111 - for (int i = 0; i < mSelectors.size(); i++) { - - Each element is returned by the selector |XPATH_SELECT_LOC|. - - We can parse this to get: - 1. Line number - 2. Line of source code - 3. Coverage status (c, z, or p) - 4. Fractional coverage value (% out of 100 if PARTIALLY_COVERED) - """ - # Selector to match all elements within the rows that are in the table - # that displays all of the different packages. - _XPATH_SELECT_PACKAGE_ELEMENTS = './/BODY/TABLE[4]/TR/TD/A' - - # Selector to match all elements within the rows that are in the table - # that displays all of the different classes within a package. - _XPATH_SELECT_CLASS_ELEMENTS = './/BODY/TABLE[3]/TR/TD/A' - - # Selector to match all elements within the table containing Java source - # code in an EMMA HTML file. - _XPATH_SELECT_LOC = './/BODY/TABLE[4]/TR' - - # Children of HTML elements are represented as a list in ElementTree. These - # constants represent list indices corresponding to relevant child elements. - - # Child 1 contains percentage covered for a line. - _ELEMENT_PERCENT_COVERED = 1 - - # Child 1 contains the original line of source code. - _ELEMENT_CONTAINING_SOURCE_CODE = 1 - - # Child 0 contains the line number. - _ELEMENT_CONTAINING_LINENO = 0 - - # Maps CSS class names to corresponding coverage constants. - _CSS_TO_STATUS = {'c': COVERED, 'p': PARTIALLY_COVERED, 'z': NOT_COVERED} - - # UTF-8 no break space. 
- _NO_BREAK_SPACE = '\xc2\xa0' - - def __init__(self, emma_file_base_dir): - """Initializes _EmmaHtmlParser. - - Args: - emma_file_base_dir: Path to the location where EMMA report files are - stored. Should be where index.html is stored. - """ - self._base_dir = emma_file_base_dir - self._emma_files_path = os.path.join(self._base_dir, '_files') - self._index_path = os.path.join(self._base_dir, 'index.html') - - def GetLineCoverage(self, emma_file_path): - """Returns a list of LineCoverage objects for the given EMMA HTML file. - - Args: - emma_file_path: String representing the path to the EMMA HTML file. - - Returns: - A list of LineCoverage objects. - """ - line_tr_elements = self._FindElements( - emma_file_path, self._XPATH_SELECT_LOC) - line_coverage = [] - for tr in line_tr_elements: - # Get the coverage status. - coverage_status = self._CSS_TO_STATUS.get(tr.get('CLASS'), NOT_EXECUTABLE) - # Get the fractional coverage value. - if coverage_status == PARTIALLY_COVERED: - title_attribute = (tr[self._ELEMENT_PERCENT_COVERED].get('TITLE')) - # Parse string that contains percent covered: "83% line coverage ...". - percent_covered = title_attribute.split('%')[0] - fractional_coverage = int(percent_covered) / 100.0 - else: - fractional_coverage = 1.0 - - # Get the line number. - lineno_element = tr[self._ELEMENT_CONTAINING_LINENO] - # Handles oddly formatted HTML (where there is an extra tag). - lineno = int(lineno_element.text or - lineno_element[self._ELEMENT_CONTAINING_LINENO].text) - # Get the original line of Java source code. - raw_source = tr[self._ELEMENT_CONTAINING_SOURCE_CODE].text - utf8_source = raw_source.encode('UTF-8') - source = utf8_source.replace(self._NO_BREAK_SPACE, ' ') - - line = LineCoverage(lineno, source, coverage_status, fractional_coverage) - line_coverage.append(line) - - return line_coverage - - def GetPackageNameToEmmaFileDict(self): - """Returns a dict mapping Java packages to EMMA HTML coverage files. - - Parses the EMMA index.html file to get a list of packages, then parses each - package HTML file to get a list of classes for that package, and creates - a dict with this info. - - Returns: - A dict mapping string representation of Java packages (with class - names appended) to the corresponding file paths of EMMA HTML files. - """ - # These elements contain each package name and the path of the file - # where all classes within said package are listed. - package_link_elements = self._FindElements( - self._index_path, self._XPATH_SELECT_PACKAGE_ELEMENTS) - # Maps file path of package directory (EMMA generated) to package name. - # Example: emma_dir/f.html: org.chromium.chrome. - package_links = { - os.path.join(self._base_dir, link.attrib['HREF']): link.text - for link in package_link_elements if 'HREF' in link.attrib - } - - package_to_emma = {} - for package_emma_file_path, package_name in package_links.iteritems(): - # These elements contain each class name in the current package and - # the path of the file where the coverage info is stored for each class. 
- coverage_file_link_elements = self._FindElements( - package_emma_file_path, self._XPATH_SELECT_CLASS_ELEMENTS) - - for class_name_element in coverage_file_link_elements: - emma_coverage_file_path = os.path.join( - self._emma_files_path, class_name_element.attrib['HREF']) - full_package_name = '%s.%s' % (package_name, class_name_element.text) - package_to_emma[full_package_name] = emma_coverage_file_path - - return package_to_emma - - # pylint: disable=no-self-use - def _FindElements(self, file_path, xpath_selector): - """Reads a HTML file and performs an XPath match. - - Args: - file_path: String representing the path to the HTML file. - xpath_selector: String representing xpath search pattern. - - Returns: - A list of ElementTree.Elements matching the given XPath selector. - Returns an empty list if there is no match. - """ - with open(file_path) as f: - file_contents = f.read().decode('ISO-8859-1').encode('UTF-8') - root = ElementTree.fromstring(file_contents) - return root.findall(xpath_selector) - - -class _EmmaCoverageStats(object): - """Computes code coverage stats for Java code using the coverage tool EMMA. - - This class provides an API that allows users to capture absolute code coverage - and code coverage on a subset of lines for each Java source file. Coverage - reports are generated in JSON format. - """ - # Regular expression to get package name from Java package statement. - RE_PACKAGE_MATCH_GROUP = 'package' - RE_PACKAGE = re.compile(r'package (?P<%s>[\w.]*);' % RE_PACKAGE_MATCH_GROUP) - - def __init__(self, emma_file_base_dir, files_for_coverage): - """Initialize _EmmaCoverageStats. - - Args: - emma_file_base_dir: String representing the path to the base directory - where EMMA HTML coverage files are stored, i.e. parent of index.html. - files_for_coverage: A list of Java source code file paths to get EMMA - coverage for. - """ - self._emma_parser = _EmmaHtmlParser(emma_file_base_dir) - self._source_to_emma = self._GetSourceFileToEmmaFileDict(files_for_coverage) - - def GetCoverageDict(self, lines_for_coverage): - """Returns a dict containing detailed coverage information. - - Gets detailed coverage stats for each file specified in the - |lines_for_coverage| dict and the total incremental number of lines covered - and executable for all files in |lines_for_coverage|. - - Args: - lines_for_coverage: A dict mapping Java source file paths to lists of line - numbers. - - Returns: - A dict containing coverage stats for the given dict of files and lines. - Contains absolute coverage stats for each file, coverage stats for each - file's lines specified in |lines_for_coverage|, line by line coverage - for each file, and overall coverage stats for the lines specified in - |lines_for_coverage|. 
- """ - file_coverage = {} - for file_path, line_numbers in lines_for_coverage.iteritems(): - file_coverage_dict = self.GetCoverageDictForFile(file_path, line_numbers) - if file_coverage_dict: - file_coverage[file_path] = file_coverage_dict - else: - logging.warning( - 'No code coverage data for %s, skipping.', file_path) - - covered_statuses = [s['incremental'] for s in file_coverage.itervalues()] - num_covered_lines = sum(s['covered'] for s in covered_statuses) - num_total_lines = sum(s['total'] for s in covered_statuses) - return { - 'files': file_coverage, - 'patch': { - 'incremental': { - 'covered': num_covered_lines, - 'total': num_total_lines - } - } - } - - def GetCoverageDictForFile(self, file_path, line_numbers): - """Returns a dict containing detailed coverage info for the given file. - - Args: - file_path: The path to the Java source file that we want to create the - coverage dict for. - line_numbers: A list of integer line numbers to retrieve additional stats - for. - - Returns: - A dict containing absolute, incremental, and line by line coverage for - a file. - """ - if file_path not in self._source_to_emma: - return None - emma_file = self._source_to_emma[file_path] - total_line_coverage = self._emma_parser.GetLineCoverage(emma_file) - incremental_line_coverage = [line for line in total_line_coverage - if line.lineno in line_numbers] - line_by_line_coverage = [ - { - 'line': line.source, - 'coverage': line.covered_status, - 'changed': line.lineno in line_numbers, - 'fractional_coverage': line.fractional_line_coverage, - } - for line in total_line_coverage - ] - total_covered_lines, total_lines = ( - self.GetSummaryStatsForLines(total_line_coverage)) - incremental_covered_lines, incremental_total_lines = ( - self.GetSummaryStatsForLines(incremental_line_coverage)) - - file_coverage_stats = { - 'absolute': { - 'covered': total_covered_lines, - 'total': total_lines - }, - 'incremental': { - 'covered': incremental_covered_lines, - 'total': incremental_total_lines - }, - 'source': line_by_line_coverage, - } - return file_coverage_stats - - # pylint: disable=no-self-use - def GetSummaryStatsForLines(self, line_coverage): - """Gets summary stats for a given list of LineCoverage objects. - - Args: - line_coverage: A list of LineCoverage objects. - - Returns: - A tuple containing the number of lines that are covered and the total - number of lines that are executable, respectively - """ - partially_covered_sum = 0 - covered_status_totals = {COVERED: 0, NOT_COVERED: 0, PARTIALLY_COVERED: 0} - for line in line_coverage: - status = line.covered_status - if status == NOT_EXECUTABLE: - continue - covered_status_totals[status] += 1 - if status == PARTIALLY_COVERED: - partially_covered_sum += line.fractional_line_coverage - - total_covered = covered_status_totals[COVERED] + partially_covered_sum - total_lines = sum(covered_status_totals.values()) - return total_covered, total_lines - - def _GetSourceFileToEmmaFileDict(self, files): - """Gets a dict used to correlate Java source files with EMMA HTML files. - - This method gathers the information needed to correlate EMMA HTML - files with Java source files. EMMA XML and plain text reports do not provide - line by line coverage data, so HTML reports must be used instead. - Unfortunately, the HTML files that are created are given garbage names - (i.e 1.html) so we need to manually correlate EMMA HTML files - with the original Java source files. - - Args: - files: A list of file names for which coverage information is desired. 
- - Returns: - A dict mapping Java source file paths to EMMA HTML file paths. - """ - # Maps Java source file paths to package names. - # Example: /usr/code/file.java -> org.chromium.file.java. - source_to_package = {} - for file_path in files: - package = self.GetPackageNameFromFile(file_path) - if package: - source_to_package[file_path] = package - else: - logging.warning("Skipping %s because it doesn\'t have a package " - "statement.", file_path) - - # Maps package names to EMMA report HTML files. - # Example: org.chromium.file.java -> out/coverage/1a.html. - package_to_emma = self._emma_parser.GetPackageNameToEmmaFileDict() - # Finally, we have a dict mapping Java file paths to EMMA report files. - # Example: /usr/code/file.java -> out/coverage/1a.html. - source_to_emma = {source: package_to_emma[package] - for source, package in source_to_package.iteritems() - if package in package_to_emma} - return source_to_emma - - @staticmethod - def NeedsCoverage(file_path): - """Checks to see if the file needs to be analyzed for code coverage. - - Args: - file_path: A string representing path to the file. - - Returns: - True for Java files that exist, False for all others. - """ - if os.path.splitext(file_path)[1] == '.java' and os.path.exists(file_path): - return True - else: - logging.info('Skipping file %s, cannot compute code coverage.', file_path) - return False - - @staticmethod - def GetPackageNameFromFile(file_path): - """Gets the full package name including the file name for a given file path. - - Args: - file_path: String representing the path to the Java source file. - - Returns: - A string representing the full package name with file name appended or - None if there is no package statement in the file. - """ - with open(file_path) as f: - file_content = f.read() - package_match = re.search(_EmmaCoverageStats.RE_PACKAGE, file_content) - if package_match: - package = package_match.group(_EmmaCoverageStats.RE_PACKAGE_MATCH_GROUP) - file_name = os.path.basename(file_path) - return '%s.%s' % (package, file_name) - else: - return None - - -def GenerateCoverageReport(line_coverage_file, out_file_path, coverage_dir): - """Generates a coverage report for a given set of lines. - - Writes the results of the coverage analysis to the file specified by - |out_file_path|. - - Args: - line_coverage_file: The path to a file which contains a dict mapping file - names to lists of line numbers. Example: {file1: [1, 2, 3], ...} means - that we should compute coverage information on lines 1 - 3 for file1. - out_file_path: A string representing the location to write the JSON report. - coverage_dir: A string representing the file path where the EMMA - HTML coverage files are located (i.e. folder where index.html is located). 
- """ - with open(line_coverage_file) as f: - potential_files_for_coverage = json.load(f) - - files_for_coverage = {f: lines - for f, lines in potential_files_for_coverage.iteritems() - if _EmmaCoverageStats.NeedsCoverage(f)} - - coverage_results = {} - if files_for_coverage: - code_coverage = _EmmaCoverageStats(coverage_dir, files_for_coverage.keys()) - coverage_results = code_coverage.GetCoverageDict(files_for_coverage) - else: - logging.info('No Java files requiring coverage were included in %s.', - line_coverage_file) - - with open(out_file_path, 'w+') as out_status_file: - json.dump(coverage_results, out_status_file) - - -def main(): - argparser = argparse.ArgumentParser() - argparser.add_argument('--out', required=True, type=str, - help='Report output file path.') - argparser.add_argument('--emma-dir', required=True, type=str, - help='EMMA HTML report directory.') - argparser.add_argument('--lines-for-coverage-file', required=True, type=str, - help='File containing a JSON object. Should contain a ' - 'dict mapping file names to lists of line numbers of ' - 'code for which coverage information is desired.') - argparser.add_argument('-v', '--verbose', action='count', - help='Print verbose log information.') - args = argparser.parse_args() - run_tests_helper.SetLogLevel(args.verbose) - devil_chromium.Initialize() - GenerateCoverageReport(args.lines_for_coverage_file, args.out, args.emma_dir) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/android/emma_coverage_stats_test.py b/build/android/emma_coverage_stats_test.py deleted file mode 100755 index d53292cbd736..000000000000 --- a/build/android/emma_coverage_stats_test.py +++ /dev/null @@ -1,561 +0,0 @@ -#!/usr/bin/env vpython -# Copyright 2015 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# pylint: disable=protected-access - -import unittest -from xml.etree import ElementTree - -import emma_coverage_stats - -import mock # pylint: disable=import-error - -EMPTY_COVERAGE_STATS_DICT = { - 'files': {}, - 'patch': { - 'incremental': { - 'covered': 0, 'total': 0 - } - } -} - - -class _EmmaHtmlParserTest(unittest.TestCase): - """Tests for _EmmaHtmlParser. - - Uses modified EMMA report HTML that contains only the subset of tags needed - for test verification. - """ - - def setUp(self): - self.emma_dir = 'fake/dir/' - self.parser = emma_coverage_stats._EmmaHtmlParser(self.emma_dir) - self.simple_html = 'Test HTML' - self.index_html = ( - '' - '' - '' - '
' - '' - '
' - '' - '
' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '
nameclass, %method, %block, %line, %
org.chromium.chrome.browser0% (0/3)
org.chromium.chrome.browser.tabmodel0% (0/8)
' - '' - '
' - '' - '' - ) - self.package_1_class_list_html = ( - '' - '' - '' - '
' - '' - '
' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '
nameclass, %method, %block, %line, %
IntentHelper.java0% (0/3)0% (0/9)0% (0/97)0% (0/26)
' - '' - '
' - '' - '' - ) - self.package_2_class_list_html = ( - '' - '' - '' - '
' - '' - '
' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '
nameclass, %method, %block, %line, %
ContentSetting.java0% (0/1)
DevToolsServer.java
FileProviderHelper.java
ContextualMenuBar.java
AccessibilityUtil.java
NavigationPopup.java
' - '' - '
' - '' - '' - ) - self.partially_covered_tr_html = ( - '' - '108' - '' - 'if (index < 0 || index = mSelectors.size()) index = 0;' - '' - ) - self.covered_tr_html = ( - '' - '110' - ' if (mSelectors.get(index) != null) {' - '' - ) - self.not_executable_tr_html = ( - '' - '109' - ' ' - '' - ) - self.tr_with_extra_a_tag = ( - '' - '' - '
54' - '' - ' }' - '' - ) - - def testInit(self): - emma_dir = self.emma_dir - parser = emma_coverage_stats._EmmaHtmlParser(emma_dir) - self.assertEqual(parser._base_dir, emma_dir) - self.assertEqual(parser._emma_files_path, 'fake/dir/_files') - self.assertEqual(parser._index_path, 'fake/dir/index.html') - - def testFindElements_basic(self): - read_values = [self.simple_html] - found, _ = MockOpenForFunction(self.parser._FindElements, read_values, - file_path='fake', xpath_selector='.//TD') - self.assertIs(type(found), list) - self.assertIs(type(found[0]), ElementTree.Element) - self.assertEqual(found[0].text, 'Test HTML') - - def testFindElements_multipleElements(self): - multiple_trs = self.not_executable_tr_html + self.covered_tr_html - read_values = ['
' + multiple_trs + '
'] - found, _ = MockOpenForFunction(self.parser._FindElements, read_values, - file_path='fake', xpath_selector='.//TR') - self.assertEquals(2, len(found)) - - def testFindElements_noMatch(self): - read_values = [self.simple_html] - found, _ = MockOpenForFunction(self.parser._FindElements, read_values, - file_path='fake', xpath_selector='.//TR') - self.assertEqual(found, []) - - def testFindElements_badFilePath(self): - with self.assertRaises(IOError): - with mock.patch('os.path.exists', return_value=False): - self.parser._FindElements('fake', xpath_selector='//tr') - - def testGetPackageNameToEmmaFileDict_basic(self): - expected_dict = { - 'org.chromium.chrome.browser.AccessibilityUtil.java': - 'fake/dir/_files/23.html', - 'org.chromium.chrome.browser.ContextualMenuBar.java': - 'fake/dir/_files/22.html', - 'org.chromium.chrome.browser.tabmodel.IntentHelper.java': - 'fake/dir/_files/1e.html', - 'org.chromium.chrome.browser.ContentSetting.java': - 'fake/dir/_files/1f.html', - 'org.chromium.chrome.browser.DevToolsServer.java': - 'fake/dir/_files/20.html', - 'org.chromium.chrome.browser.NavigationPopup.java': - 'fake/dir/_files/24.html', - 'org.chromium.chrome.browser.FileProviderHelper.java': - 'fake/dir/_files/21.html'} - - read_values = [self.index_html, self.package_1_class_list_html, - self.package_2_class_list_html] - return_dict, mock_open = MockOpenForFunction( - self.parser.GetPackageNameToEmmaFileDict, read_values) - - self.assertDictEqual(return_dict, expected_dict) - self.assertEqual(mock_open.call_count, 3) - calls = [mock.call('fake/dir/index.html'), - mock.call('fake/dir/_files/1.html'), - mock.call('fake/dir/_files/0.html')] - mock_open.assert_has_calls(calls) - - def testGetPackageNameToEmmaFileDict_noPackageElements(self): - self.parser._FindElements = mock.Mock(return_value=[]) - return_dict = self.parser.GetPackageNameToEmmaFileDict() - self.assertDictEqual({}, return_dict) - - def testGetLineCoverage_status_basic(self): - line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html]) - self.assertEqual(line_coverage[0].covered_status, - emma_coverage_stats.COVERED) - - def testGetLineCoverage_status_statusMissing(self): - line_coverage = self.GetLineCoverageWithFakeElements( - [self.not_executable_tr_html]) - self.assertEqual(line_coverage[0].covered_status, - emma_coverage_stats.NOT_EXECUTABLE) - - def testGetLineCoverage_fractionalCoverage_basic(self): - line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html]) - self.assertEqual(line_coverage[0].fractional_line_coverage, 1.0) - - def testGetLineCoverage_fractionalCoverage_partial(self): - line_coverage = self.GetLineCoverageWithFakeElements( - [self.partially_covered_tr_html]) - self.assertEqual(line_coverage[0].fractional_line_coverage, 0.78) - - def testGetLineCoverage_lineno_basic(self): - line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html]) - self.assertEqual(line_coverage[0].lineno, 110) - - def testGetLineCoverage_lineno_withAlternativeHtml(self): - line_coverage = self.GetLineCoverageWithFakeElements( - [self.tr_with_extra_a_tag]) - self.assertEqual(line_coverage[0].lineno, 54) - - def testGetLineCoverage_source(self): - self.parser._FindElements = mock.Mock( - return_value=[ElementTree.fromstring(self.covered_tr_html)]) - line_coverage = self.parser.GetLineCoverage('fake_path') - self.assertEqual(line_coverage[0].source, - ' if (mSelectors.get(index) != null) {') - - def testGetLineCoverage_multipleElements(self): - line_coverage = 
self.GetLineCoverageWithFakeElements( - [self.covered_tr_html, self.partially_covered_tr_html, - self.tr_with_extra_a_tag]) - self.assertEqual(len(line_coverage), 3) - - def GetLineCoverageWithFakeElements(self, html_elements): - """Wraps GetLineCoverage so mock HTML can easily be used. - - Args: - html_elements: List of strings each representing an HTML element. - - Returns: - A list of LineCoverage objects. - """ - elements = [ElementTree.fromstring(string) for string in html_elements] - with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements', - return_value=elements): - return self.parser.GetLineCoverage('fake_path') - - -class _EmmaCoverageStatsTest(unittest.TestCase): - """Tests for _EmmaCoverageStats.""" - - def setUp(self): - self.good_source_to_emma = { - '/path/to/1/File1.java': '/emma/1.html', - '/path/2/File2.java': '/emma/2.html', - '/path/2/File3.java': '/emma/3.html' - } - self.line_coverage = [ - emma_coverage_stats.LineCoverage( - 1, '', emma_coverage_stats.COVERED, 1.0), - emma_coverage_stats.LineCoverage( - 2, '', emma_coverage_stats.COVERED, 1.0), - emma_coverage_stats.LineCoverage( - 3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0), - emma_coverage_stats.LineCoverage( - 4, '', emma_coverage_stats.NOT_COVERED, 1.0), - emma_coverage_stats.LineCoverage( - 5, '', emma_coverage_stats.PARTIALLY_COVERED, 0.85), - emma_coverage_stats.LineCoverage( - 6, '', emma_coverage_stats.PARTIALLY_COVERED, 0.20) - ] - self.lines_for_coverage = [1, 3, 5, 6] - with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements', - return_value=[]): - self.simple_coverage = emma_coverage_stats._EmmaCoverageStats( - 'fake_dir', {}) - - def testInit(self): - coverage_stats = self.simple_coverage - self.assertIsInstance(coverage_stats._emma_parser, - emma_coverage_stats._EmmaHtmlParser) - self.assertIsInstance(coverage_stats._source_to_emma, dict) - - def testNeedsCoverage_withExistingJavaFile(self): - test_file = '/path/to/file/File.java' - with mock.patch('os.path.exists', return_value=True): - self.assertTrue( - emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file)) - - def testNeedsCoverage_withNonJavaFile(self): - test_file = '/path/to/file/File.c' - with mock.patch('os.path.exists', return_value=True): - self.assertFalse( - emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file)) - - def testNeedsCoverage_fileDoesNotExist(self): - test_file = '/path/to/file/File.java' - with mock.patch('os.path.exists', return_value=False): - self.assertFalse( - emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file)) - - def testGetPackageNameFromFile_basic(self): - test_file_text = """// Test Copyright - package org.chromium.chrome.browser; - import android.graphics.RectF;""" - result_package, _ = MockOpenForFunction( - emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile, - [test_file_text], file_path='/path/to/file/File.java') - self.assertEqual(result_package, 'org.chromium.chrome.browser.File.java') - - def testGetPackageNameFromFile_noPackageStatement(self): - result_package, _ = MockOpenForFunction( - emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile, - ['not a package statement'], file_path='/path/to/file/File.java') - self.assertIsNone(result_package) - - def testGetSummaryStatsForLines_basic(self): - covered, total = self.simple_coverage.GetSummaryStatsForLines( - self.line_coverage) - self.assertEqual(covered, 3.05) - self.assertEqual(total, 5) - - def testGetSourceFileToEmmaFileDict(self): - package_names = { - '/path/to/1/File1.java': 
'org.fake.one.File1.java', - '/path/2/File2.java': 'org.fake.File2.java', - '/path/2/File3.java': 'org.fake.File3.java' - } - package_to_emma = { - 'org.fake.one.File1.java': '/emma/1.html', - 'org.fake.File2.java': '/emma/2.html', - 'org.fake.File3.java': '/emma/3.html' - } - with mock.patch('os.path.exists', return_value=True): - coverage_stats = self.simple_coverage - coverage_stats._emma_parser.GetPackageNameToEmmaFileDict = mock.MagicMock( - return_value=package_to_emma) - coverage_stats.GetPackageNameFromFile = lambda x: package_names[x] - result_dict = coverage_stats._GetSourceFileToEmmaFileDict( - package_names.keys()) - self.assertDictEqual(result_dict, self.good_source_to_emma) - - def testGetCoverageDictForFile(self): - line_coverage = self.line_coverage - self.simple_coverage._emma_parser.GetLineCoverage = lambda x: line_coverage - self.simple_coverage._source_to_emma = {'/fake/src': 'fake/emma'} - lines = self.lines_for_coverage - expected_dict = { - 'absolute': { - 'covered': 3.05, - 'total': 5 - }, - 'incremental': { - 'covered': 2.05, - 'total': 3 - }, - 'source': [ - { - 'line': line_coverage[0].source, - 'coverage': line_coverage[0].covered_status, - 'changed': True, - 'fractional_coverage': line_coverage[0].fractional_line_coverage, - }, - { - 'line': line_coverage[1].source, - 'coverage': line_coverage[1].covered_status, - 'changed': False, - 'fractional_coverage': line_coverage[1].fractional_line_coverage, - }, - { - 'line': line_coverage[2].source, - 'coverage': line_coverage[2].covered_status, - 'changed': True, - 'fractional_coverage': line_coverage[2].fractional_line_coverage, - }, - { - 'line': line_coverage[3].source, - 'coverage': line_coverage[3].covered_status, - 'changed': False, - 'fractional_coverage': line_coverage[3].fractional_line_coverage, - }, - { - 'line': line_coverage[4].source, - 'coverage': line_coverage[4].covered_status, - 'changed': True, - 'fractional_coverage': line_coverage[4].fractional_line_coverage, - }, - { - 'line': line_coverage[5].source, - 'coverage': line_coverage[5].covered_status, - 'changed': True, - 'fractional_coverage': line_coverage[5].fractional_line_coverage, - } - ] - } - result_dict = self.simple_coverage.GetCoverageDictForFile( - '/fake/src', lines) - self.assertDictEqual(result_dict, expected_dict) - - def testGetCoverageDictForFile_emptyCoverage(self): - expected_dict = { - 'absolute': {'covered': 0, 'total': 0}, - 'incremental': {'covered': 0, 'total': 0}, - 'source': [] - } - self.simple_coverage._emma_parser.GetLineCoverage = lambda x: [] - self.simple_coverage._source_to_emma = {'fake_dir': 'fake/emma'} - result_dict = self.simple_coverage.GetCoverageDictForFile('fake_dir', {}) - self.assertDictEqual(result_dict, expected_dict) - - def testGetCoverageDictForFile_missingCoverage(self): - self.simple_coverage._source_to_emma = {} - result_dict = self.simple_coverage.GetCoverageDictForFile('fake_file', {}) - self.assertIsNone(result_dict) - - def testGetCoverageDict_basic(self): - files_for_coverage = { - '/path/to/1/File1.java': [1, 3, 4], - '/path/2/File2.java': [1, 2] - } - self.simple_coverage._source_to_emma = { - '/path/to/1/File1.java': 'emma_1', - '/path/2/File2.java': 'emma_2' - } - coverage_info = { - 'emma_1': [ - emma_coverage_stats.LineCoverage( - 1, '', emma_coverage_stats.COVERED, 1.0), - emma_coverage_stats.LineCoverage( - 2, '', emma_coverage_stats.PARTIALLY_COVERED, 0.5), - emma_coverage_stats.LineCoverage( - 3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0), - emma_coverage_stats.LineCoverage( - 4, 
'', emma_coverage_stats.COVERED, 1.0) - ], - 'emma_2': [ - emma_coverage_stats.LineCoverage( - 1, '', emma_coverage_stats.NOT_COVERED, 1.0), - emma_coverage_stats.LineCoverage( - 2, '', emma_coverage_stats.COVERED, 1.0) - ] - } - expected_dict = { - 'files': { - '/path/2/File2.java': { - 'absolute': {'covered': 1, 'total': 2}, - 'incremental': {'covered': 1, 'total': 2}, - 'source': [{'changed': True, 'coverage': 0, - 'line': '', 'fractional_coverage': 1.0}, - {'changed': True, 'coverage': 1, - 'line': '', 'fractional_coverage': 1.0}] - }, - '/path/to/1/File1.java': { - 'absolute': {'covered': 2.5, 'total': 3}, - 'incremental': {'covered': 2, 'total': 2}, - 'source': [{'changed': True, 'coverage': 1, - 'line': '', 'fractional_coverage': 1.0}, - {'changed': False, 'coverage': 2, - 'line': '', 'fractional_coverage': 0.5}, - {'changed': True, 'coverage': -1, - 'line': '', 'fractional_coverage': 1.0}, - {'changed': True, 'coverage': 1, - 'line': '', 'fractional_coverage': 1.0}] - } - }, - 'patch': {'incremental': {'covered': 3, 'total': 4}} - } - # Return the relevant coverage info for each file. - self.simple_coverage._emma_parser.GetLineCoverage = ( - lambda x: coverage_info[x]) - result_dict = self.simple_coverage.GetCoverageDict(files_for_coverage) - self.assertDictEqual(result_dict, expected_dict) - - def testGetCoverageDict_noCoverage(self): - result_dict = self.simple_coverage.GetCoverageDict({}) - self.assertDictEqual(result_dict, EMPTY_COVERAGE_STATS_DICT) - - -class EmmaCoverageStatsGenerateCoverageReport(unittest.TestCase): - """Tests for GenerateCoverageReport.""" - - def testGenerateCoverageReport_missingJsonFile(self): - with self.assertRaises(IOError): - with mock.patch('os.path.exists', return_value=False): - emma_coverage_stats.GenerateCoverageReport('', '', '') - - def testGenerateCoverageReport_invalidJsonFile(self): - with self.assertRaises(ValueError): - with mock.patch('os.path.exists', return_value=True): - MockOpenForFunction(emma_coverage_stats.GenerateCoverageReport, [''], - line_coverage_file='', out_file_path='', - coverage_dir='') - - -def MockOpenForFunction(func, side_effects, **kwargs): - """Allows easy mock open and read for callables that open multiple files. - - Will mock the python open function in a way such that each time read() is - called on an open file, the next element in |side_effects| is returned. This - makes it easier to test functions that call open() multiple times. - - Args: - func: The callable to invoke once mock files are setup. - side_effects: A list of return values for each file to return once read. - Length of list should be equal to the number calls to open in |func|. - **kwargs: Keyword arguments to be passed to |func|. - - Returns: - A tuple containing the return value of |func| and the MagicMock object used - to mock all calls to open respectively. - """ - mock_open = mock.mock_open() - mock_open.side_effect = [mock.mock_open(read_data=side_effect).return_value - for side_effect in side_effects] - with mock.patch('__builtin__.open', mock_open): - return func(**kwargs), mock_open - - -if __name__ == '__main__': - # Suppress logging messages. - unittest.main(buffer=True) diff --git a/build/android/envsetup.sh b/build/android/envsetup.sh index 7f549d9cf75b..315db296a7ab 100755 --- a/build/android/envsetup.sh +++ b/build/android/envsetup.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright (c) 2012 The Chromium Authors. All rights reserved. 
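The MockOpenForFunction helper deleted above relies on a mock-open pattern that is easy to misread: each call to the patched open() hands back the next prepared file handle, whose read() returns the corresponding side-effect string. A minimal self-contained sketch of the same idea, updated for Python 3 (builtins.open rather than __builtin__.open); all file names and contents here are illustrative, not part of any Chromium file:

    import unittest.mock as mock

    def read_two_files():
        # Opens two files; each open() call consumes the next side effect.
        with open('a.txt') as f:
            first = f.read()
        with open('b.txt') as f:
            second = f.read()
        return first, second

    mock_open = mock.mock_open()
    mock_open.side_effect = [
        mock.mock_open(read_data=data).return_value
        for data in ('contents of a', 'contents of b')
    ]
    with mock.patch('builtins.open', mock_open):
        assert read_two_files() == ('contents of a', 'contents of b')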
+# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/fast_local_dev_server.py b/build/android/fast_local_dev_server.py index a35c5007e470..282dcf553447 100755 --- a/build/android/fast_local_dev_server.py +++ b/build/android/fast_local_dev_server.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Creates an server to offload non-critical-path GN targets.""" @@ -110,6 +110,7 @@ def _num_running_processes(): if line.startswith('procs_running'): return int(line.rstrip().split()[1]) assert False, 'Could not read /proc/stat' + return 0 def _maybe_start_tasks(self): if self._deactivated: @@ -174,6 +175,8 @@ def start(self, on_complete_callback: Callable[[], None]) -> int: # TODO(wnwen): Use ionice to reduce resource consumption. TaskStats.add_process() log(f'STARTING {self.name}') + # This use of preexec_fn is sufficiently simple, just one os.nice call. + # pylint: disable=subprocess-popen-preexec-fn self._proc = subprocess.Popen( self.cmd, stdout=subprocess.PIPE, @@ -281,6 +284,8 @@ def _process_requests(sock: socket.socket): tasks: Dict[Tuple[str, str], Task] = {} task_manager = TaskManager() try: + log('READY... Remember to set android_static_analysis="build_server" in ' + 'args.gn files') for data in _listen_for_request_data(sock): task = Task(name=data['name'], cwd=data['cwd'], @@ -303,11 +308,28 @@ def _process_requests(sock: socket.socket): def main(): parser = argparse.ArgumentParser(description=__doc__) - parser.parse_args() + parser.add_argument( + '--fail-if-not-running', + action='store_true', + help='Used by GN to fail fast if the build server is not running.') + args = parser.parse_args() + if args.fail_if_not_running: + with socket.socket(socket.AF_UNIX) as sock: + try: + sock.connect(server_utils.SOCKET_ADDRESS) + except socket.error: + print('Build server is not running and ' + 'android_static_analysis="build_server" is set.\nPlease run ' + 'this command in a separate terminal:\n\n' + '$ build/android/fast_local_dev_server.py\n') + return 1 + else: + return 0 with socket.socket(socket.AF_UNIX) as sock: sock.bind(server_utils.SOCKET_ADDRESS) sock.listen() _process_requests(sock) + return 0 if __name__ == '__main__': diff --git a/build/android/generate_jacoco_report.py b/build/android/generate_jacoco_report.py index b8c4a9783f88..44e82acbf70f 100755 --- a/build/android/generate_jacoco_report.py +++ b/build/android/generate_jacoco_report.py @@ -1,12 +1,11 @@ -#!/usr/bin/env vpython +#!/usr/bin/env vpython3 -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
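The --fail-if-not-running probe added to fast_local_dev_server.py above amounts to a connect-or-fail check against a Unix domain socket. A standalone sketch of that liveness test, assuming a placeholder SOCKET_PATH (the real address comes from server_utils.SOCKET_ADDRESS):

    import socket

    SOCKET_PATH = '/tmp/build_server.sock'  # placeholder path

    def server_is_running():
        # connect() succeeds only if a server process is accepting on the
        # socket; otherwise an OSError (socket.error) is raised.
        with socket.socket(socket.AF_UNIX) as sock:
            try:
                sock.connect(SOCKET_PATH)
                return True
            except socket.error:
                return False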
"""Aggregates Jacoco coverage files to produce output.""" -from __future__ import print_function import argparse import fnmatch @@ -35,29 +34,28 @@ _SOURCES_JSON_FILES_SUFFIX = '__jacoco_sources.json' -# These should match the jar class files generated in internal_rules.gni -_DEVICE_CLASS_EXCLUDE_SUFFIX = 'host_filter.jar' -_HOST_CLASS_EXCLUDE_SUFFIX = 'device_filter.jar' - -def _CreateClassfileArgs(class_files, exclude_suffix=None, include_substr=None): +def _CreateClassfileArgs(class_files, report_type, include_substr=None): """Returns a filtered list of files with classfile option. Args: class_files: A list of class files. - exclude_suffix: Suffix to look for to exclude. + report_type: A string indicating if device or host files are desired. include_substr: A substring that must be present to include the file. - exclude_suffix takes precedence over this. Returns: A list of files that don't use the suffix. """ + # These should match the jar class files generated in internal_rules.gni + search_jar_suffix = '%s.filter.jar' % report_type result_class_files = [] for f in class_files: - include_file = True - if exclude_suffix and f.endswith(exclude_suffix): - include_file = False - # Exclude overrides include. + include_file = False + if f.endswith(search_jar_suffix): + include_file = True + + # If include_substr is specified, remove files that don't have the + # required substring. if include_file and include_substr and include_substr not in f: include_file = False if include_file: @@ -67,13 +65,7 @@ def _CreateClassfileArgs(class_files, exclude_suffix=None, include_substr=None): def _GenerateReportOutputArgs(args, class_files, report_type): - class_jar_exclude = None - if report_type == 'device': - class_jar_exclude = _DEVICE_CLASS_EXCLUDE_SUFFIX - elif report_type == 'host': - class_jar_exclude = _HOST_CLASS_EXCLUDE_SUFFIX - - cmd = _CreateClassfileArgs(class_files, class_jar_exclude, + cmd = _CreateClassfileArgs(class_files, report_type, args.include_substr_filter) if args.format == 'html': report_dir = os.path.join(args.output_dir, report_type) diff --git a/build/android/gradle/AndroidManifest.xml b/build/android/gradle/AndroidManifest.xml index f3e50e0c936e..dfbb9bdf3145 100644 --- a/build/android/gradle/AndroidManifest.xml +++ b/build/android/gradle/AndroidManifest.xml @@ -1,6 +1,6 @@ diff --git a/build/android/gradle/android.jinja b/build/android/gradle/android.jinja index 7d566dd78ddf..3b66b97bab9b 100644 --- a/build/android/gradle/android.jinja +++ b/build/android/gradle/android.jinja @@ -51,13 +51,13 @@ android { defaultConfig { vectorDrawables.useSupportLibrary = true - minSdkVersion 24 + minSdkVersion {{ min_sdk_version }} targetSdkVersion {{ target_sdk_version }} } compileOptions { - sourceCompatibility JavaVersion.VERSION_1_8 - targetCompatibility JavaVersion.VERSION_1_8 + sourceCompatibility JavaVersion.VERSION_11 + targetCompatibility JavaVersion.VERSION_11 } {% if native is defined %} diff --git a/build/android/gradle/generate_gradle.py b/build/android/gradle/generate_gradle.py index 80d0b0a7fc41..bc05baf9bbf3 100755 --- a/build/android/gradle/generate_gradle.py +++ b/build/android/gradle/generate_gradle.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2016 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -12,6 +12,7 @@ import json import logging import os +import pathlib import re import shutil import subprocess @@ -32,6 +33,12 @@ sys.path.append(os.path.dirname(_BUILD_ANDROID)) import gn_helpers +# Typically these should track the versions that work on the slowest release +# channel, i.e. Android Studio stable. +_DEFAULT_ANDROID_GRADLE_PLUGIN_VERSION = '7.3.1' +_DEFAULT_KOTLIN_GRADLE_PLUGIN_VERSION = '1.8.0' +_DEFAULT_GRADLE_WRAPPER_VERSION = '7.4' + _DEPOT_TOOLS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'depot_tools') _DEFAULT_ANDROID_MANIFEST_PATH = os.path.join( @@ -45,8 +52,6 @@ _CMAKE_FILE = 'CMakeLists.txt' # This needs to come first alphabetically among all modules. _MODULE_ALL = '_all' -_SRC_INTERNAL = os.path.join( - os.path.dirname(host_paths.DIR_SOURCE_ROOT), 'src-internal') _INSTRUMENTATION_TARGET_SUFFIX = '_test_apk__test_apk__apk' _DEFAULT_TARGETS = [ @@ -57,6 +62,7 @@ '//chrome/android:chrome_junit_tests', '//chrome/android:chrome_public_apk', '//chrome/android:chrome_public_test_apk', + '//chrome/android:chrome_public_unit_test_apk', '//content/public/android:content_junit_tests', '//content/shell/android:content_shell_apk', # Below must be included even with --all since they are libraries. @@ -64,12 +70,6 @@ '//tools/android/errorprone_plugin:errorprone_plugin_java', ] -_EXCLUDED_PREBUILT_JARS = [ - # Android Studio already provides Desugar runtime. - # Including it would cause linking error because of a duplicate class. - 'lib.java/third_party/bazel/desugar/Desugar-runtime.jar' -] - def _TemplatePath(name): return os.path.join(_FILE_DIR, '{}.jinja'.format(name)) @@ -83,7 +83,7 @@ def _RebasePath(path_or_list, new_cwd=None, old_cwd=None): """ if path_or_list is None: return [] - if not isinstance(path_or_list, basestring): + if not isinstance(path_or_list, str): return [_RebasePath(p, new_cwd, old_cwd) for p in path_or_list] if old_cwd is None: old_cwd = constants.GetOutDirectory() @@ -94,11 +94,6 @@ def _RebasePath(path_or_list, new_cwd=None, old_cwd=None): return os.path.abspath(os.path.join(old_cwd, path_or_list)) -def _IsSubpathOf(child, parent): - """Returns whether |child| is a subpath of |parent|.""" - return not os.path.relpath(child, parent).startswith(os.pardir) - - def _WriteFile(path, data): """Writes |data| to |path|, constucting parent directories if necessary.""" logging.info('Writing %s', path) @@ -132,10 +127,10 @@ def _QueryForAllGnTargets(output_dir): '--nested', '--build', '--output-directory', output_dir ] logging.info('Running: %r', cmd) - return subprocess.check_output(cmd).splitlines() + return subprocess.check_output(cmd, encoding='UTF-8').splitlines() -class _ProjectEntry(object): +class _ProjectEntry: """Helper class for project entries.""" _cached_entries = {} @@ -160,7 +155,7 @@ def FromGnTarget(cls, gn_target): @classmethod def FromBuildConfigPath(cls, path): prefix = 'gen/' - suffix = '.build_config' + suffix = '.build_config.json' assert path.startswith(prefix) and path.endswith(suffix), path subdir = path[len(prefix):-len(suffix)] gn_target = '//%s:%s' % (os.path.split(subdir)) @@ -178,9 +173,6 @@ def GnTarget(self): def NinjaTarget(self): return self._gn_target[2:] - def GnBuildConfigTarget(self): - return '%s__build_config_crbug_908819' % self._gn_target - def GradleSubdir(self): """Returns the output subdirectory.""" ninja_target = self.NinjaTarget() @@ -198,9 +190,9 @@ def ProjectName(self): return self.GradleSubdir().replace(os.path.sep, '.') def BuildConfig(self): - """Reads and returns the project's
.build_config JSON.""" + """Reads and returns the project's .build_config.json JSON.""" if not self._build_config: - path = os.path.join('gen', self.GradleSubdir() + '.build_config') + path = os.path.join('gen', self.GradleSubdir() + '.build_config.json') with open(_RebasePath(path)) as jsonfile: self._build_config = json.load(jsonfile) return self._build_config @@ -225,7 +217,7 @@ def IsValid(self): 'java_library', "java_annotation_processor", 'java_binary', - 'junit_binary', + 'robolectric_binary', ) def ResSources(self): @@ -233,17 +225,16 @@ def ResSources(self): def JavaFiles(self): if self._java_files is None: - java_sources_file = self.DepsInfo().get('java_sources_file') + target_sources_file = self.DepsInfo().get('target_sources_file') java_files = [] - if java_sources_file: - java_sources_file = _RebasePath(java_sources_file) - java_files = build_utils.ReadSourcesList(java_sources_file) + if target_sources_file: + target_sources_file = _RebasePath(target_sources_file) + java_files = build_utils.ReadSourcesList(target_sources_file) self._java_files = java_files return self._java_files def PrebuiltJars(self): - all_jars = self.Gradle().get('dependent_prebuilt_jars', []) - return [i for i in all_jars if i not in _EXCLUDED_PREBUILT_JARS] + return self.Gradle().get('dependent_prebuilt_jars', []) def AllEntries(self): """Returns a list of all entries that the current entry depends on. @@ -263,16 +254,15 @@ def AllEntries(self): return self._all_entries -class _ProjectContextGenerator(object): +class _ProjectContextGenerator: """Helper class to generate gradle build files""" def __init__(self, project_dir, build_vars, use_gradle_process_resources, - jinja_processor, split_projects, channel): + jinja_processor, split_projects): self.project_dir = project_dir self.build_vars = build_vars self.use_gradle_process_resources = use_gradle_process_resources self.jinja_processor = jinja_processor self.split_projects = split_projects - self.channel = channel self.processed_java_dirs = set() self.processed_prebuilts = set() self.processed_res_dirs = set() @@ -303,12 +293,14 @@ def _GenCustomManifest(self, entry): for library targets.""" resource_packages = entry.Javac().get('resource_packages') if not resource_packages: - logging.debug('Target ' + entry.GnTarget() + ' includes resources from ' - 'unknown package. Unable to process with gradle.') + logging.debug( + 'Target %s includes resources from unknown package. ' + 'Unable to process with gradle.', entry.GnTarget()) return _DEFAULT_ANDROID_MANIFEST_PATH - elif len(resource_packages) > 1: - logging.debug('Target ' + entry.GnTarget() + ' includes resources from ' - 'multiple packages. Unable to process with gradle.') + if len(resource_packages) > 1: + logging.debug( + 'Target %s includes resources from multiple packages. ' + 'Unable to process with gradle.', entry.GnTarget()) return _DEFAULT_ANDROID_MANIFEST_PATH variables = {'package': resource_packages[0]} @@ -423,37 +415,42 @@ def _ComputeExcludeFilters(wanted_files, unwanted_files, parent_dir): return excludes -def _ComputeJavaSourceDirsAndExcludes(output_dir, java_files): +def _ComputeJavaSourceDirsAndExcludes(output_dir, source_files): """Computes the list of java source directories and exclude patterns. - 1. Computes the root java source directories from the list of files. + This includes both Java and Kotlin files since both are listed in the same + "java" section for gradle. + + 1. Computes the root source directories from the list of files. 2. 
Compute exclude patterns that exclude all extra files only. - 3. Returns the list of java source directories and exclude patterns. + 3. Returns the list of source directories and exclude patterns. """ java_dirs = [] excludes = [] - if java_files: - java_files = _RebasePath(java_files) - computed_dirs = _ComputeJavaSourceDirs(java_files) - java_dirs = computed_dirs.keys() - all_found_java_files = set() - - for directory, files in computed_dirs.iteritems(): - found_java_files = build_utils.FindInDirectory(directory, '*.java') - all_found_java_files.update(found_java_files) - unwanted_java_files = set(found_java_files) - set(files) - if unwanted_java_files: + if source_files: + source_files = _RebasePath(source_files) + computed_dirs = _ComputeJavaSourceDirs(source_files) + java_dirs = list(computed_dirs.keys()) + all_found_source_files = set() + + for directory, files in computed_dirs.items(): + found_source_files = (build_utils.FindInDirectory(directory, '*.java') + + build_utils.FindInDirectory(directory, '*.kt')) + all_found_source_files.update(found_source_files) + unwanted_source_files = set(found_source_files) - set(files) + if unwanted_source_files: logging.debug('Directory requires excludes: %s', directory) excludes.extend( - _ComputeExcludeFilters(files, unwanted_java_files, directory)) + _ComputeExcludeFilters(files, unwanted_source_files, directory)) - missing_java_files = set(java_files) - all_found_java_files + missing_source_files = set(source_files) - all_found_source_files # Warn only about non-generated files that are missing. - missing_java_files = [p for p in missing_java_files - if not p.startswith(output_dir)] - if missing_java_files: - logging.warning( - 'Some java files were not found: %s', missing_java_files) + missing_source_files = [ + p for p in missing_source_files if not p.startswith(output_dir) + ] + if missing_source_files: + logging.warning('Some source files were not found: %s', + missing_source_files) return java_dirs, excludes @@ -486,6 +483,19 @@ def _CreateJniLibsDir(output_dir, entry_output_dir, so_files): return [] +def _ParseVersionFromFile(file_path, version_regex_string, default_version): + if os.path.exists(file_path): + content = pathlib.Path(file_path).read_text() + match = re.search(version_regex_string, content) + if match: + version = match.group(1) + logging.info('Using existing version %s in %s.', version, file_path) + return version + logging.warning('Unable to find %s in %s:\n%s', version_regex_string, + file_path, content) + return default_version + + def _GenerateLocalProperties(sdk_dir): """Returns the data for local.properties as a string.""" return '\n'.join([ @@ -495,14 +505,17 @@ def _GenerateLocalProperties(sdk_dir): ]) -def _GenerateGradleWrapperPropertiesCanary(): +def _GenerateGradleWrapperProperties(file_path): """Returns the data for gradle-wrapper.properties as a string.""" - # Before May 2020, this wasn't necessary. Might not be necessary at some point - # in the future? 
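The _ParseVersionFromFile helper added above is what lets regeneration preserve whichever version a developer already has on disk, falling back to the default only when no match is found. An illustrative round trip against a mocked-up gradle-wrapper.properties line, using the same regex the added code passes in:

    import re

    content = ('distributionUrl=https\\://services.gradle.org'
               '/distributions/gradle-7.4-all.zip\n')
    match = re.search(r'/distributions/gradle-([\d.]+)-all.zip', content)
    assert match and match.group(1) == '7.4'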
+ + version = _ParseVersionFromFile(file_path, + r'/distributions/gradle-([\d.]+)-all.zip', + _DEFAULT_GRADLE_WRAPPER_VERSION) + return '\n'.join([ '# Generated by //build/android/gradle/generate_gradle.py', - ('distributionUrl=https\\://services.gradle.org/distributions/' - 'gradle-6.5-rc-1-all.zip\n'), + ('distributionUrl=https\\://services.gradle.org' + f'/distributions/gradle-{version}-all.zip'), '', ]) @@ -520,15 +533,16 @@ def _GenerateGradleProperties(): def _GenerateBaseVars(generator, build_vars): variables = {} - variables['compile_sdk_version'] = ( - 'android-%s' % build_vars['compile_sdk_version']) - target_sdk_version = build_vars['android_sdk_version'] - if target_sdk_version.isalpha(): + # Avoid pre-release SDKs since Studio might not know how to download them. + variables['compile_sdk_version'] = ('android-%s' % + build_vars['public_android_sdk_version']) + target_sdk_version = build_vars['public_android_sdk_version'] + if str(target_sdk_version).isalpha(): target_sdk_version = '"{}"'.format(target_sdk_version) variables['target_sdk_version'] = target_sdk_version + variables['min_sdk_version'] = build_vars['default_min_sdk_version'] variables['use_gradle_process_resources'] = ( generator.use_gradle_process_resources) - variables['channel'] = generator.channel return variables @@ -545,14 +559,14 @@ def _GenerateGradleFile(entry, generator, build_vars, jinja_processor): gradle_treat_as_prebuilt = deps_info.get('gradle_treat_as_prebuilt', False) if is_prebuilt or gradle_treat_as_prebuilt: return None - elif deps_info['requires_android']: + if deps_info['requires_android']: target_type = 'android_library' else: target_type = 'java_library' elif deps_info['type'] == 'java_binary': target_type = 'java_binary' variables['main_class'] = deps_info.get('main_class') - elif deps_info['type'] == 'junit_binary': + elif deps_info['type'] == 'robolectric_binary': target_type = 'android_junit' sourceSetName = 'test' else: @@ -570,7 +584,7 @@ def _GenerateGradleFile(entry, generator, build_vars, jinja_processor): test_entry = generator.Generate(e) test_entry['android_manifest'] = generator.GenerateManifest(e) variables['android_test'].append(test_entry) - for key, value in test_entry.iteritems(): + for key, value in test_entry.items(): if isinstance(value, list): test_entry[key] = sorted(set(value) - set(variables['main'][key])) @@ -640,12 +654,12 @@ def Relativize(paths): 'android_manifest': Relativize(_DEFAULT_ANDROID_MANIFEST_PATH), 'java_dirs': Relativize(main_java_dirs), 'prebuilts': Relativize(prebuilts), - 'java_excludes': ['**/*.java'], + 'java_excludes': ['**/*.java', '**/*.kt'], 'res_dirs': Relativize(res_dirs), } variables['android_test'] = [{ 'java_dirs': Relativize(junit_test_java_dirs), - 'java_excludes': ['**/*.java'], + 'java_excludes': ['**/*.java', '**/*.kt'], }] if native_targets: variables['native'] = _GetNative( @@ -660,9 +674,20 @@ def Relativize(paths): os.path.join(gradle_output_dir, _MODULE_ALL, _CMAKE_FILE), cmake_data) -def _GenerateRootGradle(jinja_processor, channel): +def _GenerateRootGradle(jinja_processor, file_path): """Returns the data for the root project's build.gradle.""" - return jinja_processor.Render(_TemplatePath('root'), {'channel': channel}) + android_gradle_plugin_version = _ParseVersionFromFile( + file_path, r'com.android.tools.build:gradle:([\d.]+)', + _DEFAULT_ANDROID_GRADLE_PLUGIN_VERSION) + kotlin_gradle_plugin_version = _ParseVersionFromFile( + file_path, r'org.jetbrains.kotlin:kotlin-gradle-plugin:([\d.]+)', + 
_DEFAULT_KOTLIN_GRADLE_PLUGIN_VERSION) + + return jinja_processor.Render( + _TemplatePath('root'), { + 'android_gradle_plugin_version': android_gradle_plugin_version, + 'kotlin_gradle_plugin_version': kotlin_gradle_plugin_version, + }) def _GenerateSettingsGradle(project_entries): @@ -766,26 +791,11 @@ def main(): action='append', help='GN native targets to generate for. May be ' 'repeated.') - parser.add_argument('--compile-sdk-version', - type=int, - default=0, - help='Override compileSdkVersion for android sdk docs. ' - 'Useful when sources for android_sdk_version is ' - 'not available in Android Studio.') parser.add_argument( '--sdk-path', default=os.path.expanduser('~/Android/Sdk'), help='The path to use as the SDK root, overrides the ' 'default at ~/Android/Sdk.') - version_group = parser.add_mutually_exclusive_group() - version_group.add_argument('--beta', - action='store_true', - help='Generate a project that is compatible with ' - 'Android Studio Beta.') - version_group.add_argument('--canary', - action='store_true', - help='Generate a project that is compatible with ' - 'Android Studio Canary.') args = parser.parse_args() if args.output_directory: constants.SetOutputDirectory(args.output_directory) @@ -835,19 +845,9 @@ def main(): build_vars = gn_helpers.ReadBuildVars(output_dir) jinja_processor = jinja_template.JinjaProcessor(_FILE_DIR) - if args.beta: - channel = 'beta' - elif args.canary: - channel = 'canary' - else: - channel = 'stable' - if args.compile_sdk_version: - build_vars['compile_sdk_version'] = args.compile_sdk_version - else: - build_vars['compile_sdk_version'] = build_vars['android_sdk_version'] generator = _ProjectContextGenerator(_gradle_output_dir, build_vars, - args.use_gradle_process_resources, jinja_processor, args.split_projects, - channel) + args.use_gradle_process_resources, + jinja_processor, args.split_projects) main_entries = [_ProjectEntry.FromGnTarget(t) for t in targets] @@ -856,7 +856,7 @@ def main(): # used by apks/bundles/binaries/tests or that are explicitly mentioned in # --targets. BASE_TYPES = ('android_apk', 'android_app_bundle_module', 'java_binary', - 'junit_binary') + 'robolectric_binary') main_entries = [ e for e in main_entries if (e.GetType() in BASE_TYPES or e.GnTarget() in targets_from_args @@ -887,8 +887,9 @@ def main(): _GenerateModuleAll(_gradle_output_dir, generator, build_vars, jinja_processor, args.native_targets) - _WriteFile(os.path.join(generator.project_dir, _GRADLE_BUILD_FILE), - _GenerateRootGradle(jinja_processor, channel)) + root_gradle_path = os.path.join(generator.project_dir, _GRADLE_BUILD_FILE) + _WriteFile(root_gradle_path, + _GenerateRootGradle(jinja_processor, root_gradle_path)) _WriteFile(os.path.join(generator.project_dir, 'settings.gradle'), _GenerateSettingsGradle(project_entries)) @@ -906,10 +907,8 @@ def main(): wrapper_properties = os.path.join(generator.project_dir, 'gradle', 'wrapper', 'gradle-wrapper.properties') - if os.path.exists(wrapper_properties): - os.unlink(wrapper_properties) - if args.canary: - _WriteFile(wrapper_properties, _GenerateGradleWrapperPropertiesCanary()) + _WriteFile(wrapper_properties, + _GenerateGradleWrapperProperties(wrapper_properties)) generated_inputs = set() for entry in entries: @@ -919,13 +918,19 @@ def main(): # Build all paths references by .gradle that exist within output_dir. 
generated_inputs.update(generator.GeneratedInputs(entry_to_gen)) if generated_inputs: - targets = _RebasePath(generated_inputs, output_dir) + # Skip targets outside the output_dir since those are not generated. + targets = [ + p for p in _RebasePath(generated_inputs, output_dir) + if not p.startswith(os.pardir) + ] _RunNinja(output_dir, targets) - logging.warning('Generated files will only appear once you\'ve built them.') - logging.warning('Generated projects for Android Studio %s', channel) - logging.warning('For more tips: https://chromium.googlesource.com/chromium' - '/src.git/+/master/docs/android_studio.md') + print('Generated projects for Android Studio.') + print('** Building using Android Studio / Gradle does not work.') + print('** This project is only for IDE editing & tools.') + print('Note: Generated files will appear only if they have been built') + print('For more tips: https://chromium.googlesource.com/chromium/src.git/' + '+/main/docs/android_studio.md') if __name__ == '__main__': diff --git a/build/android/gradle/gn_to_cmake.py b/build/android/gradle/gn_to_cmake.py deleted file mode 100755 index d3e80ae76846..000000000000 --- a/build/android/gradle/gn_to_cmake.py +++ /dev/null @@ -1,689 +0,0 @@ -#!/usr/bin/env python -# Copyright 2016 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Usage: gn_to_cmake.py - -gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py - -or - -gn gen out/config --ide=json -python gn/gn_to_cmake.py out/config/project.json - -The first is recommended, as it will auto-update. -""" - -from __future__ import print_function - -import functools -import json -import posixpath -import string -import sys - - -def CMakeStringEscape(a): - """Escapes the string 'a' for use inside a CMake string. - - This means escaping - '\' otherwise it may be seen as modifying the next character - '"' otherwise it will end the string - ';' otherwise the string becomes a list - - The following do not need to be escaped - '#' when the lexer is in string state, this does not start a comment - """ - return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"') - - -def CMakeTargetEscape(a): - """Escapes the string 'a' for use as a CMake target name. - - CMP0037 in CMake 3.0 restricts target names to "^[A-Za-z0-9_.:+-]+$" - The ':' is only allowed for imported targets. 
- """ - def Escape(c): - if c in string.ascii_letters or c in string.digits or c in '_.+-': - return c - else: - return '__' - return ''.join([Escape(c) for c in a]) - - -def SetVariable(out, variable_name, value): - """Sets a CMake variable.""" - out.write('set("') - out.write(CMakeStringEscape(variable_name)) - out.write('" "') - out.write(CMakeStringEscape(value)) - out.write('")\n') - - -def SetVariableList(out, variable_name, values): - """Sets a CMake variable to a list.""" - if not values: - return SetVariable(out, variable_name, "") - if len(values) == 1: - return SetVariable(out, variable_name, values[0]) - out.write('list(APPEND "') - out.write(CMakeStringEscape(variable_name)) - out.write('"\n "') - out.write('"\n "'.join([CMakeStringEscape(value) for value in values])) - out.write('")\n') - - -def SetFilesProperty(output, variable, property_name, values, sep): - """Given a set of source files, sets the given property on them.""" - output.write('set_source_files_properties(') - WriteVariable(output, variable) - output.write(' PROPERTIES ') - output.write(property_name) - output.write(' "') - for value in values: - output.write(CMakeStringEscape(value)) - output.write(sep) - output.write('")\n') - - -def SetCurrentTargetProperty(out, property_name, values, sep=''): - """Given a target, sets the given property.""" - out.write('set_target_properties("${target}" PROPERTIES ') - out.write(property_name) - out.write(' "') - for value in values: - out.write(CMakeStringEscape(value)) - out.write(sep) - out.write('")\n') - - -def WriteVariable(output, variable_name, prepend=None): - if prepend: - output.write(prepend) - output.write('${') - output.write(variable_name) - output.write('}') - - -# See GetSourceFileType in gn -source_file_types = { - '.cc': 'cxx', - '.cpp': 'cxx', - '.cxx': 'cxx', - '.c': 'c', - '.s': 'asm', - '.S': 'asm', - '.asm': 'asm', - '.o': 'obj', - '.obj': 'obj', -} - - -class CMakeTargetType(object): - def __init__(self, command, modifier, property_modifier, is_linkable): - self.command = command - self.modifier = modifier - self.property_modifier = property_modifier - self.is_linkable = is_linkable -CMakeTargetType.custom = CMakeTargetType('add_custom_target', 'SOURCES', - None, False) - -# See GetStringForOutputType in gn -cmake_target_types = { - 'unknown': CMakeTargetType.custom, - 'group': CMakeTargetType.custom, - 'executable': CMakeTargetType('add_executable', None, 'RUNTIME', True), - 'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY', True), - 'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY', True), - 'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE', False), - 'source_set': CMakeTargetType('add_library', 'OBJECT', None, False), - 'copy': CMakeTargetType.custom, - 'action': CMakeTargetType.custom, - 'action_foreach': CMakeTargetType.custom, - 'bundle_data': CMakeTargetType.custom, - 'create_bundle': CMakeTargetType.custom, -} - - -def FindFirstOf(s, a): - return min(s.find(i) for i in a if i in s) - - -def GetCMakeTargetName(gn_target_name): - # See /src/tools/gn/label.cc#Resolve - # //base/test:test_support(//build/toolchain/win:msvc) - path_separator = FindFirstOf(gn_target_name, (':', '(')) - location = None - name = None - toolchain = None - if not path_separator: - location = gn_target_name[2:] - else: - location = gn_target_name[2:path_separator] - toolchain_separator = gn_target_name.find('(', path_separator) - if toolchain_separator == -1: - name = gn_target_name[path_separator + 1:] - else: 
- if toolchain_separator > path_separator: - name = gn_target_name[path_separator + 1:toolchain_separator] - assert gn_target_name.endswith(')') - toolchain = gn_target_name[toolchain_separator + 1:-1] - assert location or name - - cmake_target_name = None - if location.endswith('/' + name): - cmake_target_name = location - elif location: - cmake_target_name = location + '_' + name - else: - cmake_target_name = name - if toolchain: - cmake_target_name += '--' + toolchain - return CMakeTargetEscape(cmake_target_name) - - -class Project(object): - def __init__(self, project_json): - self.targets = project_json['targets'] - build_settings = project_json['build_settings'] - self.root_path = build_settings['root_path'] - self.build_path = posixpath.join(self.root_path, - build_settings['build_dir'][2:]) - self.object_source_deps = {} - - def GetAbsolutePath(self, path): - if path.startswith("//"): - return self.root_path + "/" + path[2:] - else: - return path - - def GetObjectSourceDependencies(self, gn_target_name, object_dependencies): - """All OBJECT libraries whose sources have not been absorbed.""" - if gn_target_name in self.object_source_deps: - object_dependencies.update(self.object_source_deps[gn_target_name]) - return - target_deps = set() - dependencies = self.targets[gn_target_name].get('deps', []) - for dependency in dependencies: - dependency_type = self.targets[dependency].get('type', None) - if dependency_type == 'source_set': - target_deps.add(dependency) - if dependency_type not in gn_target_types_that_absorb_objects: - self.GetObjectSourceDependencies(dependency, target_deps) - self.object_source_deps[gn_target_name] = target_deps - object_dependencies.update(target_deps) - - def GetObjectLibraryDependencies(self, gn_target_name, object_dependencies): - """All OBJECT libraries whose libraries have not been absorbed.""" - dependencies = self.targets[gn_target_name].get('deps', []) - for dependency in dependencies: - dependency_type = self.targets[dependency].get('type', None) - if dependency_type == 'source_set': - object_dependencies.add(dependency) - self.GetObjectLibraryDependencies(dependency, object_dependencies) - - -class Target(object): - def __init__(self, gn_target_name, project): - self.gn_name = gn_target_name - self.properties = project.targets[self.gn_name] - self.cmake_name = GetCMakeTargetName(self.gn_name) - self.gn_type = self.properties.get('type', None) - self.cmake_type = cmake_target_types.get(self.gn_type, None) - - -def WriteAction(out, target, project, sources, synthetic_dependencies): - outputs = [] - output_directories = set() - for output in target.properties.get('outputs', []): - output_abs_path = project.GetAbsolutePath(output) - outputs.append(output_abs_path) - output_directory = posixpath.dirname(output_abs_path) - if output_directory: - output_directories.add(output_directory) - outputs_name = '${target}__output' - SetVariableList(out, outputs_name, outputs) - - out.write('add_custom_command(OUTPUT ') - WriteVariable(out, outputs_name) - out.write('\n') - - if output_directories: - out.write(' COMMAND ${CMAKE_COMMAND} -E make_directory "') - out.write('" "'.join([CMakeStringEscape(d) for d in output_directories])) - out.write('"\n') - - script = target.properties['script'] - arguments = target.properties['args'] - out.write(' COMMAND python "') - out.write(CMakeStringEscape(project.GetAbsolutePath(script))) - out.write('"') - if arguments: - out.write('\n "') - out.write('"\n "'.join([CMakeStringEscape(a) for a in arguments])) - out.write('"') 
- out.write('\n') - - out.write(' DEPENDS ') - for sources_type_name in sources.values(): - WriteVariable(out, sources_type_name, ' ') - out.write('\n') - - #TODO: CMake 3.7 is introducing DEPFILE - - out.write(' WORKING_DIRECTORY "') - out.write(CMakeStringEscape(project.build_path)) - out.write('"\n') - - out.write(' COMMENT "Action: ${target}"\n') - - out.write(' VERBATIM)\n') - - synthetic_dependencies.add(outputs_name) - - -def ExpandPlaceholders(source, a): - source_dir, source_file_part = posixpath.split(source) - source_name_part, _ = posixpath.splitext(source_file_part) - #TODO: {{source_gen_dir}}, {{source_out_dir}}, {{response_file_name}} - return a.replace('{{source}}', source) \ - .replace('{{source_file_part}}', source_file_part) \ - .replace('{{source_name_part}}', source_name_part) \ - .replace('{{source_dir}}', source_dir) \ - .replace('{{source_root_relative_dir}}', source_dir) - - -def WriteActionForEach(out, target, project, sources, synthetic_dependencies): - all_outputs = target.properties.get('outputs', []) - inputs = target.properties.get('sources', []) - # TODO: consider expanding 'output_patterns' instead. - outputs_per_input = len(all_outputs) / len(inputs) - for count, source in enumerate(inputs): - source_abs_path = project.GetAbsolutePath(source) - - outputs = [] - output_directories = set() - for output in all_outputs[outputs_per_input * count: - outputs_per_input * (count+1)]: - output_abs_path = project.GetAbsolutePath(output) - outputs.append(output_abs_path) - output_directory = posixpath.dirname(output_abs_path) - if output_directory: - output_directories.add(output_directory) - outputs_name = '${target}__output_' + str(count) - SetVariableList(out, outputs_name, outputs) - - out.write('add_custom_command(OUTPUT ') - WriteVariable(out, outputs_name) - out.write('\n') - - if output_directories: - out.write(' COMMAND ${CMAKE_COMMAND} -E make_directory "') - out.write('" "'.join([CMakeStringEscape(d) for d in output_directories])) - out.write('"\n') - - script = target.properties['script'] - # TODO: need to expand {{xxx}} in arguments - arguments = target.properties['args'] - out.write(' COMMAND python "') - out.write(CMakeStringEscape(project.GetAbsolutePath(script))) - out.write('"') - if arguments: - out.write('\n "') - expand = functools.partial(ExpandPlaceholders, source_abs_path) - out.write('"\n "'.join( - [CMakeStringEscape(expand(a)) for a in arguments])) - out.write('"') - out.write('\n') - - out.write(' DEPENDS') - if 'input' in sources: - WriteVariable(out, sources['input'], ' ') - out.write(' "') - out.write(CMakeStringEscape(source_abs_path)) - out.write('"\n') - - #TODO: CMake 3.7 is introducing DEPFILE - - out.write(' WORKING_DIRECTORY "') - out.write(CMakeStringEscape(project.build_path)) - out.write('"\n') - - out.write(' COMMENT "Action ${target} on ') - out.write(CMakeStringEscape(source_abs_path)) - out.write('"\n') - - out.write(' VERBATIM)\n') - - synthetic_dependencies.add(outputs_name) - - -def WriteCopy(out, target, project, sources, synthetic_dependencies): - inputs = target.properties.get('sources', []) - raw_outputs = target.properties.get('outputs', []) - - # TODO: consider expanding 'output_patterns' instead. 
- outputs = [] - for output in raw_outputs: - output_abs_path = project.GetAbsolutePath(output) - outputs.append(output_abs_path) - outputs_name = '${target}__output' - SetVariableList(out, outputs_name, outputs) - - out.write('add_custom_command(OUTPUT ') - WriteVariable(out, outputs_name) - out.write('\n') - - for src, dst in zip(inputs, outputs): - out.write(' COMMAND ${CMAKE_COMMAND} -E copy "') - out.write(CMakeStringEscape(project.GetAbsolutePath(src))) - out.write('" "') - out.write(CMakeStringEscape(dst)) - out.write('"\n') - - out.write(' DEPENDS ') - for sources_type_name in sources.values(): - WriteVariable(out, sources_type_name, ' ') - out.write('\n') - - out.write(' WORKING_DIRECTORY "') - out.write(CMakeStringEscape(project.build_path)) - out.write('"\n') - - out.write(' COMMENT "Copy ${target}"\n') - - out.write(' VERBATIM)\n') - - synthetic_dependencies.add(outputs_name) - - -def WriteCompilerFlags(out, target, project, sources): - # Hack, set linker language to c if no c or cxx files present. - if not 'c' in sources and not 'cxx' in sources: - SetCurrentTargetProperty(out, 'LINKER_LANGUAGE', ['C']) - - # Mark uncompiled sources as uncompiled. - if 'input' in sources: - SetFilesProperty(out, sources['input'], 'HEADER_FILE_ONLY', ('True',), '') - if 'other' in sources: - SetFilesProperty(out, sources['other'], 'HEADER_FILE_ONLY', ('True',), '') - - # Mark object sources as linkable. - if 'obj' in sources: - SetFilesProperty(out, sources['obj'], 'EXTERNAL_OBJECT', ('True',), '') - - # TODO: 'output_name', 'output_dir', 'output_extension' - # This includes using 'source_outputs' to direct compiler output. - - # Includes - includes = target.properties.get('include_dirs', []) - if includes: - out.write('set_property(TARGET "${target}" ') - out.write('APPEND PROPERTY INCLUDE_DIRECTORIES') - for include_dir in includes: - out.write('\n "') - out.write(project.GetAbsolutePath(include_dir)) - out.write('"') - out.write(')\n') - - # Defines - defines = target.properties.get('defines', []) - if defines: - SetCurrentTargetProperty(out, 'COMPILE_DEFINITIONS', defines, ';') - - # Compile flags - # "arflags", "asmflags", "cflags", - # "cflags_c", "clfags_cc", "cflags_objc", "clfags_objcc" - # CMake does not have per target lang compile flags. - # TODO: $<$:cflags_cc style generator expression. - # http://public.kitware.com/Bug/view.php?id=14857 - flags = [] - flags.extend(target.properties.get('cflags', [])) - cflags_asm = target.properties.get('asmflags', []) - cflags_c = target.properties.get('cflags_c', []) - cflags_cxx = target.properties.get('cflags_cc', []) - if 'c' in sources and not any(k in sources for k in ('asm', 'cxx')): - flags.extend(cflags_c) - elif 'cxx' in sources and not any(k in sources for k in ('asm', 'c')): - flags.extend(cflags_cxx) - else: - # TODO: This is broken, one cannot generally set properties on files, - # as other targets may require different properties on the same files. 
- if 'asm' in sources and cflags_asm: - SetFilesProperty(out, sources['asm'], 'COMPILE_FLAGS', cflags_asm, ' ') - if 'c' in sources and cflags_c: - SetFilesProperty(out, sources['c'], 'COMPILE_FLAGS', cflags_c, ' ') - if 'cxx' in sources and cflags_cxx: - SetFilesProperty(out, sources['cxx'], 'COMPILE_FLAGS', cflags_cxx, ' ') - if flags: - SetCurrentTargetProperty(out, 'COMPILE_FLAGS', flags, ' ') - - # Linker flags - ldflags = target.properties.get('ldflags', []) - if ldflags: - SetCurrentTargetProperty(out, 'LINK_FLAGS', ldflags, ' ') - - -gn_target_types_that_absorb_objects = ( - 'executable', - 'loadable_module', - 'shared_library', - 'static_library' -) - - -def WriteSourceVariables(out, target, project): - # gn separates the sheep from the goats based on file extensions. - # A full separation is done here because of flag handing (see Compile flags). - source_types = {'cxx':[], 'c':[], 'asm':[], - 'obj':[], 'obj_target':[], 'input':[], 'other':[]} - - # TODO .def files on Windows - for source in target.properties.get('sources', []): - _, ext = posixpath.splitext(source) - source_abs_path = project.GetAbsolutePath(source) - source_types[source_file_types.get(ext, 'other')].append(source_abs_path) - - for input_path in target.properties.get('inputs', []): - input_abs_path = project.GetAbsolutePath(input_path) - source_types['input'].append(input_abs_path) - - # OBJECT library dependencies need to be listed as sources. - # Only executables and non-OBJECT libraries may reference an OBJECT library. - # https://gitlab.kitware.com/cmake/cmake/issues/14778 - if target.gn_type in gn_target_types_that_absorb_objects: - object_dependencies = set() - project.GetObjectSourceDependencies(target.gn_name, object_dependencies) - for dependency in object_dependencies: - cmake_dependency_name = GetCMakeTargetName(dependency) - obj_target_sources = '$' - source_types['obj_target'].append(obj_target_sources) - - sources = {} - for source_type, sources_of_type in source_types.items(): - if sources_of_type: - sources[source_type] = '${target}__' + source_type + '_srcs' - SetVariableList(out, sources[source_type], sources_of_type) - return sources - - -def WriteTarget(out, target, project): - out.write('\n#') - out.write(target.gn_name) - out.write('\n') - - if target.cmake_type is None: - print('Target {} has unknown target type {}, skipping.'.format( - target.gn_name, target.gn_type)) - return - - SetVariable(out, 'target', target.cmake_name) - - sources = WriteSourceVariables(out, target, project) - - synthetic_dependencies = set() - if target.gn_type == 'action': - WriteAction(out, target, project, sources, synthetic_dependencies) - if target.gn_type == 'action_foreach': - WriteActionForEach(out, target, project, sources, synthetic_dependencies) - if target.gn_type == 'copy': - WriteCopy(out, target, project, sources, synthetic_dependencies) - - out.write(target.cmake_type.command) - out.write('("${target}"') - if target.cmake_type.modifier is not None: - out.write(' ') - out.write(target.cmake_type.modifier) - for sources_type_name in sources.values(): - WriteVariable(out, sources_type_name, ' ') - if synthetic_dependencies: - out.write(' DEPENDS') - for synthetic_dependencie in synthetic_dependencies: - WriteVariable(out, synthetic_dependencie, ' ') - out.write(')\n') - - if target.cmake_type.command != 'add_custom_target': - WriteCompilerFlags(out, target, project, sources) - - libraries = set() - nonlibraries = set() - - dependencies = set(target.properties.get('deps', [])) - # Transitive OBJECT 
libraries are in sources. - # Those sources are dependent on the OBJECT library dependencies. - # Those sources cannot bring in library dependencies. - object_dependencies = set() - if target.gn_type != 'source_set': - project.GetObjectLibraryDependencies(target.gn_name, object_dependencies) - for object_dependency in object_dependencies: - dependencies.update(project.targets.get(object_dependency).get('deps', [])) - - for dependency in dependencies: - gn_dependency_type = project.targets.get(dependency, {}).get('type', None) - cmake_dependency_type = cmake_target_types.get(gn_dependency_type, None) - cmake_dependency_name = GetCMakeTargetName(dependency) - if cmake_dependency_type.command != 'add_library': - nonlibraries.add(cmake_dependency_name) - elif cmake_dependency_type.modifier != 'OBJECT': - if target.cmake_type.is_linkable: - libraries.add(cmake_dependency_name) - else: - nonlibraries.add(cmake_dependency_name) - - # Non-library dependencies. - if nonlibraries: - out.write('add_dependencies("${target}"') - for nonlibrary in nonlibraries: - out.write('\n "') - out.write(nonlibrary) - out.write('"') - out.write(')\n') - - # Non-OBJECT library dependencies. - external_libraries = target.properties.get('libs', []) - if target.cmake_type.is_linkable and (external_libraries or libraries): - library_dirs = target.properties.get('lib_dirs', []) - if library_dirs: - SetVariableList(out, '${target}__library_directories', library_dirs) - - system_libraries = [] - for external_library in external_libraries: - if '/' in external_library: - libraries.add(project.GetAbsolutePath(external_library)) - else: - if external_library.endswith('.framework'): - external_library = external_library[:-len('.framework')] - system_library = 'library__' + external_library - if library_dirs: - system_library = system_library + '__for_${target}' - out.write('find_library("') - out.write(CMakeStringEscape(system_library)) - out.write('" "') - out.write(CMakeStringEscape(external_library)) - out.write('"') - if library_dirs: - out.write(' PATHS "') - WriteVariable(out, '${target}__library_directories') - out.write('"') - out.write(')\n') - system_libraries.append(system_library) - out.write('target_link_libraries("${target}"') - for library in libraries: - out.write('\n "') - out.write(CMakeStringEscape(library)) - out.write('"') - for system_library in system_libraries: - WriteVariable(out, system_library, '\n "') - out.write('"') - out.write(')\n') - - -def WriteProject(project): - out = open(posixpath.join(project.build_path, 'CMakeLists.txt'), 'w+') - out.write('# Generated by gn_to_cmake.py.\n') - out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n') - out.write('cmake_policy(VERSION 2.8.8)\n\n') - - # Update the gn generated ninja build. - # If a build file has changed, this will update CMakeLists.ext if - # gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py - # style was used to create this config. - out.write('execute_process(COMMAND ninja -C "') - out.write(CMakeStringEscape(project.build_path)) - out.write('" build.ninja)\n') - - out.write('include(CMakeLists.ext)\n') - out.close() - - out = open(posixpath.join(project.build_path, 'CMakeLists.ext'), 'w+') - out.write('# Generated by gn_to_cmake.py.\n') - out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n') - out.write('cmake_policy(VERSION 2.8.8)\n') - - # The following appears to be as-yet undocumented. 
- # http://public.kitware.com/Bug/view.php?id=8392 - out.write('enable_language(ASM)\n\n') - # ASM-ATT does not support .S files. - # output.write('enable_language(ASM-ATT)\n') - - # Current issues with automatic re-generation: - # The gn generated build.ninja target uses build.ninja.d - # but build.ninja.d does not contain the ide or gn. - # Currently the ide is not run if the project.json file is not changed - # but the ide needs to be run anyway if it has itself changed. - # This can be worked around by deleting the project.json file. - out.write('file(READ "') - gn_deps_file = posixpath.join(project.build_path, 'build.ninja.d') - out.write(CMakeStringEscape(gn_deps_file)) - out.write('" "gn_deps_string" OFFSET ') - out.write(str(len('build.ninja: '))) - out.write(')\n') - # One would think this would need to worry about escaped spaces - # but gn doesn't escape spaces here (it generates invalid .d files). - out.write('string(REPLACE " " ";" "gn_deps" ${gn_deps_string})\n') - out.write('foreach("gn_dep" ${gn_deps})\n') - out.write(' configure_file(${gn_dep} "CMakeLists.devnull" COPYONLY)\n') - out.write('endforeach("gn_dep")\n') - - for target_name in project.targets.keys(): - out.write('\n') - WriteTarget(out, Target(target_name, project), project) - - -def main(): - if len(sys.argv) != 2: - print('Usage: ' + sys.argv[0] + ' ') - exit(1) - - json_path = sys.argv[1] - project = None - with open(json_path, 'r') as json_file: - project = json.loads(json_file.read()) - - WriteProject(Project(project)) - - -if __name__ == "__main__": - main() diff --git a/build/android/gradle/java.jinja b/build/android/gradle/java.jinja index 7626f61f7a40..61886e918a29 100644 --- a/build/android/gradle/java.jinja +++ b/build/android/gradle/java.jinja @@ -25,8 +25,8 @@ sourceSets { } } -sourceCompatibility = JavaVersion.VERSION_1_8 -targetCompatibility = JavaVersion.VERSION_1_8 +sourceCompatibility = JavaVersion.VERSION_11 +targetCompatibility = JavaVersion.VERSION_11 {% if template_type == 'java_binary' %} applicationName = "{{ target_name }}" diff --git a/build/android/gradle/root.jinja b/build/android/gradle/root.jinja index 15b5e10184b8..8009ebe0715e 100644 --- a/build/android/gradle/root.jinja +++ b/build/android/gradle/root.jinja @@ -3,24 +3,22 @@ {# found in the LICENSE file. #} // Generated by //build/android/generate_gradle.py +// This section is used to find the plugins. buildscript { repositories { google() - jcenter() -{% if channel == 'canary' %} - // Workaround for http://b/144885480. - //maven() { - // url "http://dl.bintray.com/kotlin/kotlin-eap" - //} -{% endif %} + mavenCentral() } dependencies { -{% if channel == 'canary' %} - classpath "com.android.tools.build:gradle:4.1.0-beta01" -{% elif channel == 'beta' %} - classpath "com.android.tools.build:gradle:4.0.0-rc01" -{% else %} - classpath "com.android.tools.build:gradle:4.0.1" -{% endif %} + classpath "com.android.tools.build:gradle:{{ android_gradle_plugin_version }}" + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:{{ kotlin_gradle_plugin_version }}" } } + +// This is used by individual modules to find/fetch dependencies. +allprojects { + repositories { + google() + mavenCentral() + } +} \ No newline at end of file diff --git a/build/android/gtest_apk/BUILD.gn b/build/android/gtest_apk/BUILD.gn index 2a72bc47ed24..69b0889c9f41 100644 --- a/build/android/gtest_apk/BUILD.gn +++ b/build/android/gtest_apk/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. 
+# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java b/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java index 652333bdd8ec..7f5c4a8172c2 100644 --- a/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java +++ b/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java @@ -1,4 +1,4 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. +// Copyright 2014 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java b/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java index a875e9740e4a..202078445c01 100644 --- a/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java +++ b/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java @@ -1,4 +1,4 @@ -// Copyright 2020 The Chromium Authors. All rights reserved. +// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java b/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java index 520b7485b75a..98ebf443b3d5 100644 --- a/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java +++ b/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java @@ -1,4 +1,4 @@ -// Copyright 2020 The Chromium Authors. All rights reserved. +// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java b/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java index e53900944ef2..71c56a6edfd5 100644 --- a/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java +++ b/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java @@ -1,4 +1,4 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/gyp/OWNERS b/build/android/gyp/OWNERS index 25557e1fc556..df0fa641f838 100644 --- a/build/android/gyp/OWNERS +++ b/build/android/gyp/OWNERS @@ -2,3 +2,5 @@ agrieve@chromium.org digit@chromium.org smaier@chromium.org wnwen@chromium.org + +per-file create_unwind_table*.py=file://base/profiler/OWNERS \ No newline at end of file diff --git a/build/android/gyp/aar.py b/build/android/gyp/aar.py index b157cd816f51..512d5dbe4c7a 100755 --- a/build/android/gyp/aar.py +++ b/build/android/gyp/aar.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -16,9 +16,7 @@ import zipfile from util import build_utils - -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.pardir, os.pardir))) +import action_helpers # build_utils adds //build to sys.path. import gn_helpers @@ -58,11 +56,12 @@ def _IsManifestEmpty(doc): return True -def _CreateInfo(aar_file): +def _CreateInfo(aar_file, resource_exclusion_globs): """Extracts and return .info data from an .aar file. Args: aar_file: Path to an input .aar file. + resource_exclusion_globs: List of globs that exclude res/ files. Returns: A dict containing .info data. @@ -90,7 +89,8 @@ def _CreateInfo(aar_file): if name.startswith('aidl/'): data['aidl'].append(name) elif name.startswith('res/'): - data['resources'].append(name) + if not build_utils.MatchesGlob(name, resource_exclusion_globs): + data['resources'].append(name) elif name.startswith('libs/') and name.endswith('.jar'): label = posixpath.basename(name)[:-4] label = re.sub(r'[^a-zA-Z0-9._]', '_', label) @@ -133,6 +133,11 @@ def _PerformExtract(aar_file, output_dir, name_allowlist): def _AddCommonArgs(parser): parser.add_argument( 'aar_file', help='Path to the AAR file.', type=os.path.normpath) + parser.add_argument('--ignore-resources', + action='store_true', + help='Whether to skip extraction of res/') + parser.add_argument('--resource-exclusion-globs', + help='GN list of globs for res/ files to ignore') def main(): @@ -155,14 +160,15 @@ def main(): help='Path to .info file. Asserts that it matches what ' '"list" would output.', type=argparse.FileType('r')) - subp.add_argument( - '--ignore-resources', - action='store_true', - help='Whether to skip extraction of res/') args = parser.parse_args() - aar_info = _CreateInfo(args.aar_file) + args.resource_exclusion_globs = action_helpers.parse_gn_list( + args.resource_exclusion_globs) + if args.ignore_resources: + args.resource_exclusion_globs.append('res/*') + + aar_info = _CreateInfo(args.aar_file, args.resource_exclusion_globs) formatted_info = """\ # Generated by //build/android/gyp/aar.py # To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen". @@ -177,18 +183,18 @@ def main(): 'out-of-date. Run gn gen with ' 'update_android_aar_prebuilts=true to update it.') + # Extract all files except for filtered res/ files. with zipfile.ZipFile(args.aar_file) as zf: - names = zf.namelist() - if args.ignore_resources: - names = [n for n in names if not n.startswith('res')] + names = {n for n in zf.namelist() if not n.startswith('res/')} + names.update(aar_info['resources']) - _PerformExtract(args.aar_file, args.output_dir, set(names)) + _PerformExtract(args.aar_file, args.output_dir, names) elif args.command == 'list': aar_output_present = args.output != '-' and os.path.isfile(args.output) if aar_output_present: # Some .info files are read-only, for examples the cipd-controlled ones - # under third_party/android_deps/repositoty. To deal with these, first + # under third_party/android_deps/repository. To deal with these, first # that its content is correct, and if it is, exit without touching # the file system. 
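The res/ handling added to aar.py above happens in two steps: exclusion globs trim what the .info file records under 'resources', and extraction then takes everything except res/ plus the surviving resource entries. A self-contained sketch with made-up archive names; matches_glob is a rough stand-in for build_utils.MatchesGlob:

    import fnmatch

    def matches_glob(path, globs):
        # Rough stand-in for build_utils.MatchesGlob.
        return any(fnmatch.fnmatch(path, g) for g in globs)

    names = ['classes.jar', 'res/values/strings.xml', 'res/raw/blob.bin']
    exclusion_globs = ['res/raw/*']
    # Step 1: record only the res/ entries that survive the globs.
    resources = [n for n in names
                 if n.startswith('res/') and not matches_glob(n, exclusion_globs)]
    # Step 2: extract everything except res/, then re-add survivors.
    to_extract = {n for n in names if not n.startswith('res/')}
    to_extract.update(resources)
    assert to_extract == {'classes.jar', 'res/values/strings.xml'}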
file_info = open(args.output, 'r').read() @@ -203,8 +209,8 @@ def main(): except IOError as e: if not aar_output_present: raise e - raise Exception('Could not update output file: %s\n%s\n' % - (args.output, e)) + raise Exception('Could not update output file: %s\n' % args.output) from e + if __name__ == '__main__': sys.exit(main()) diff --git a/build/android/gyp/aar.pydeps b/build/android/gyp/aar.pydeps index 7e2924b34c02..56f860e25761 100644 --- a/build/android/gyp/aar.pydeps +++ b/build/android/gyp/aar.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py +../../action_helpers.py ../../gn_helpers.py aar.py util/__init__.py diff --git a/build/android/gyp/aidl.py b/build/android/gyp/aidl.py index b8099aaecd02..8eab45dd7fc1 100755 --- a/build/android/gyp/aidl.py +++ b/build/android/gyp/aidl.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -14,6 +14,8 @@ import zipfile from util import build_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers def main(argv): @@ -23,10 +25,10 @@ def main(argv): option_parser.add_option('--includes', help='Directories to add as import search paths.') option_parser.add_option('--srcjar', help='Path for srcjar output.') - build_utils.AddDepfileOption(option_parser) + action_helpers.add_depfile_arg(option_parser) options, args = option_parser.parse_args(argv[1:]) - options.includes = build_utils.ParseGnList(options.includes) + options.includes = action_helpers.parse_gn_list(options.includes) with build_utils.TempDir() as temp_dir: for f in args: @@ -34,7 +36,7 @@ def main(argv): output = os.path.join(temp_dir, classname + '.java') aidl_cmd = [options.aidl_path] aidl_cmd += [ - '-p' + s for s in build_utils.ParseGnList(options.imports) + '-p' + s for s in action_helpers.parse_gn_list(options.imports) ] aidl_cmd += ['-I' + s for s in options.includes] aidl_cmd += [ @@ -43,7 +45,7 @@ def main(argv): ] build_utils.CheckOutput(aidl_cmd) - with build_utils.AtomicOutput(options.srcjar) as f: + with action_helpers.atomic_output(options.srcjar) as f: with zipfile.ZipFile(f, 'w') as srcjar: for path in build_utils.FindInDirectory(temp_dir, '*.java'): with open(path) as fileobj: @@ -51,13 +53,13 @@ def main(argv): pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1) arcname = '%s/%s' % ( pkg_name.replace('.', '/'), os.path.basename(path)) - build_utils.AddToZipHermetic(srcjar, arcname, data=data) + zip_helpers.add_to_zip_hermetic(srcjar, arcname, data=data) if options.depfile: include_files = [] for include_dir in options.includes: include_files += build_utils.FindInDirectory(include_dir, '*.java') - build_utils.WriteDepfile(options.depfile, options.srcjar, include_files) + action_helpers.write_depfile(options.depfile, options.srcjar, include_files) if __name__ == '__main__': diff --git a/build/android/gyp/aidl.pydeps b/build/android/gyp/aidl.pydeps index 11c55ed4b6b9..d841c9451fca 100644 --- a/build/android/gyp/aidl.pydeps +++ b/build/android/gyp/aidl.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aidl.pydeps build/android/gyp/aidl.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py aidl.py util/__init__.py 
util/build_utils.py diff --git a/build/android/gyp/allot_native_libraries.py b/build/android/gyp/allot_native_libraries.py index 978b17340370..61daac224407 100755 --- a/build/android/gyp/allot_native_libraries.py +++ b/build/android/gyp/allot_native_libraries.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -46,6 +46,7 @@ import sys from util import build_utils +import action_helpers # build_utils adds //build to sys.path. def _ModuleLibrariesPair(arg): @@ -145,7 +146,7 @@ def main(args): help='A pair of parent module name and child module name ' '(format: ":"). Can be specified multiple times.') options = parser.parse_args(build_utils.ExpandFileArgs(args)) - options.libraries = [(m, build_utils.ParseGnList(l)) + options.libraries = [(m, action_helpers.parse_gn_list(l)) for m, l in options.libraries] # Parse input creating libraries and dependency tree. diff --git a/build/android/gyp/allot_native_libraries.pydeps b/build/android/gyp/allot_native_libraries.pydeps index d8b10cd3dacd..aacaafffeb6d 100644 --- a/build/android/gyp/allot_native_libraries.pydeps +++ b/build/android/gyp/allot_native_libraries.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/allot_native_libraries.pydeps build/android/gyp/allot_native_libraries.py +../../action_helpers.py ../../gn_helpers.py allot_native_libraries.py util/__init__.py diff --git a/build/android/gyp/apkbuilder.py b/build/android/gyp/apkbuilder.py index 262cc8348a22..fa5701b6db37 100755 --- a/build/android/gyp/apkbuilder.py +++ b/build/android/gyp/apkbuilder.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright (c) 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,6 +9,7 @@ import argparse import logging import os +import posixpath import shutil import sys import tempfile @@ -19,10 +20,8 @@ from util import build_utils from util import diff_utils -from util import zipalign - -# Input dex.jar files are zipaligned. -zipalign.ApplyZipFileZipAlignFix() +import action_helpers # build_utils adds //build to sys.path. 
+import zip_helpers # Taken from aapt's Package.cpp: @@ -35,11 +34,11 @@ def _ParseArgs(args): parser = argparse.ArgumentParser() - build_utils.AddDepfileOption(parser) - parser.add_argument( - '--assets', - help='GYP-list of files to add as assets in the form ' - '"srcPath:zipPath", where ":zipPath" is optional.') + action_helpers.add_depfile_arg(parser) + parser.add_argument('--assets', + action='append', + help='GYP-list of files to add as assets in the form ' + '"srcPath:zipPath", where ":zipPath" is optional.') parser.add_argument( '--java-resources', help='GYP-list of java_resources JARs to include.') parser.add_argument('--write-asset-list', @@ -58,9 +57,6 @@ def _ParseArgs(args): default='apk', help='Specify output format.') parser.add_argument('--dex-file', help='Path to the classes.dex to use') - parser.add_argument( - '--jdk-libs-dex-file', - help='Path to classes.dex created by dex_jdk_libs.py') parser.add_argument('--uncompress-dex', action='store_true', help='Store .dex files uncompressed in the APK') parser.add_argument('--native-libs', @@ -115,43 +111,24 @@ def _ParseArgs(args): '--library-always-compress', action='append', help='The list of library files that we always compress.') - parser.add_argument( - '--library-renames', - action='append', - help='The list of library files that we prepend crazy. to their names.') parser.add_argument('--warnings-as-errors', action='store_true', help='Treat all warnings as errors.') diff_utils.AddCommandLineFlags(parser) options = parser.parse_args(args) - options.assets = build_utils.ParseGnList(options.assets) - options.uncompressed_assets = build_utils.ParseGnList( + options.assets = action_helpers.parse_gn_list(options.assets) + options.uncompressed_assets = action_helpers.parse_gn_list( options.uncompressed_assets) - options.native_lib_placeholders = build_utils.ParseGnList( + options.native_lib_placeholders = action_helpers.parse_gn_list( options.native_lib_placeholders) - options.secondary_native_lib_placeholders = build_utils.ParseGnList( + options.secondary_native_lib_placeholders = action_helpers.parse_gn_list( options.secondary_native_lib_placeholders) - options.java_resources = build_utils.ParseGnList(options.java_resources) - options.native_libs = build_utils.ParseGnList(options.native_libs) - options.secondary_native_libs = build_utils.ParseGnList( + options.java_resources = action_helpers.parse_gn_list(options.java_resources) + options.native_libs = action_helpers.parse_gn_list(options.native_libs) + options.secondary_native_libs = action_helpers.parse_gn_list( options.secondary_native_libs) - options.library_always_compress = build_utils.ParseGnList( + options.library_always_compress = action_helpers.parse_gn_list( options.library_always_compress) - options.library_renames = build_utils.ParseGnList(options.library_renames) - - # --apksigner-jar, --zipalign-path, --key-xxx arguments are - # required when building an APK, but not a bundle module. - if options.format == 'apk': - required_args = [ - 'apksigner_jar', 'zipalign_path', 'key_path', 'key_passwd', 'key_name' - ] - for required in required_args: - if not vars(options)[required]: - raise Exception('Argument --%s is required for APKs.' 
% ( - required.replace('_', '-'))) - - options.uncompress_shared_libraries = \ - options.uncompress_shared_libraries in [ 'true', 'True' ] if not options.android_abi and (options.native_libs or options.native_lib_placeholders): @@ -203,7 +180,8 @@ def _ExpandPaths(paths): def _GetAssetsToAdd(path_tuples, fast_align, disable_compression=False, - allow_reads=True): + allow_reads=True, + apk_root_dir=''): """Returns the list of file_detail tuples for assets in the apk. Args: @@ -227,12 +205,16 @@ def _GetAssetsToAdd(path_tuples, os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS) if target_compress == compress: - # AddToZipHermetic() uses this logic to avoid growing small files. + # add_to_zip_hermetic() uses this logic to avoid growing small files. # We need it here in order to set alignment correctly. if allow_reads and compress and os.path.getsize(src_path) < 16: compress = False - apk_path = 'assets/' + dest_path + if dest_path.startswith('../'): + # posixpath.join('', 'foo') == 'foo' + apk_path = posixpath.join(apk_root_dir, dest_path[3:]) + else: + apk_path = 'assets/' + dest_path alignment = 0 if compress and not fast_align else 4 assets_to_add.append((apk_path, src_path, compress, alignment)) return assets_to_add @@ -255,16 +237,15 @@ def _AddFiles(apk, details): raise Exception( 'Multiple targets specified the asset path: %s' % apk_path) except KeyError: - zipalign.AddToZipHermetic( - apk, - apk_path, - src_path=src_path, - compress=compress, - alignment=alignment) + zip_helpers.add_to_zip_hermetic(apk, + apk_path, + src_path=src_path, + compress=compress, + alignment=alignment) -def _GetNativeLibrariesToAdd(native_libs, android_abi, uncompress, fast_align, - lib_always_compress, lib_renames): +def _GetNativeLibrariesToAdd(native_libs, android_abi, fast_align, + lib_always_compress): """Returns the list of file_detail tuples for native libraries in the apk. Returns: A list of (src_path, apk_path, compress, alignment) tuple @@ -275,12 +256,7 @@ def _GetNativeLibrariesToAdd(native_libs, android_abi, uncompress, fast_align, for path in native_libs: basename = os.path.basename(path) - compress = not uncompress or any(lib_name in basename - for lib_name in lib_always_compress) - rename = any(lib_name in basename for lib_name in lib_renames) - if rename: - basename = 'crazy.' + basename - + compress = any(lib_name in basename for lib_name in lib_always_compress) lib_android_abi = android_abi if path.startswith('android_clang_arm64_hwasan/'): lib_android_abi = 'arm64-v8a-hwasan' @@ -318,10 +294,11 @@ def main(args): # Compresses about twice as fast as the default. zlib.Z_DEFAULT_COMPRESSION = 1 - # Manually align only when alignment is necessary. # Python's zip implementation duplicates file comments in the central # directory, whereas zipalign does not, so use zipalign for official builds. - fast_align = options.format == 'apk' and not options.best_compression + requires_alignment = options.format == 'apk' + run_zipalign = requires_alignment and options.best_compression + fast_align = bool(requires_alignment and not run_zipalign) native_libs = sorted(options.native_libs) @@ -341,13 +318,13 @@ def main(args): depfile_deps += secondary_native_libs if options.java_resources: - # Included via .build_config, so need to write it to depfile. + # Included via .build_config.json, so need to write it to depfile. 
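# Sketch of the dest_path handling added in _GetAssetsToAdd() above: a
# dest_path prefixed with '../' escapes assets/ and lands relative to
# apk_root_dir instead (hence the posixpath.join('', 'foo') == 'foo'
# comment). Paths below are illustrative, not taken from a real target.
import posixpath

def apk_path_for(dest_path, apk_root_dir=''):
  if dest_path.startswith('../'):
    return posixpath.join(apk_root_dir, dest_path[3:])
  return 'assets/' + dest_path

assert apk_path_for('locales/en-US.pak') == 'assets/locales/en-US.pak'
assert apk_path_for('../resources.pak') == 'resources.pak'
assert apk_path_for('../resources.pak', 'base/') == 'base/resources.pak'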
depfile_deps.extend(options.java_resources) assets = _ExpandPaths(options.assets) uncompressed_assets = _ExpandPaths(options.uncompressed_assets) - # Included via .build_config, so need to write it to depfile. + # Included via .build_config.json, so need to write it to depfile. depfile_deps.extend(x[0] for x in assets) depfile_deps.extend(x[0] for x in uncompressed_assets) depfile_deps.append(options.resource_apk) @@ -376,23 +353,24 @@ def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads): ret = _GetAssetsToAdd(assets, fast_align, disable_compression=False, - allow_reads=allow_reads) + allow_reads=allow_reads, + apk_root_dir=apk_root_dir) ret.extend( _GetAssetsToAdd(uncompressed_assets, fast_align, disable_compression=True, - allow_reads=allow_reads)) + allow_reads=allow_reads, + apk_root_dir=apk_root_dir)) return ret - libs_to_add = _GetNativeLibrariesToAdd( - native_libs, options.android_abi, options.uncompress_shared_libraries, - fast_align, options.library_always_compress, options.library_renames) + libs_to_add = _GetNativeLibrariesToAdd(native_libs, options.android_abi, + fast_align, + options.library_always_compress) if options.secondary_android_abi: libs_to_add.extend( - _GetNativeLibrariesToAdd( - secondary_native_libs, options.secondary_android_abi, - options.uncompress_shared_libraries, fast_align, - options.library_always_compress, options.library_renames)) + _GetNativeLibrariesToAdd(secondary_native_libs, + options.secondary_android_abi, + fast_align, options.library_always_compress)) if options.expected_file: # We compute expectations without reading the files. This allows us to check @@ -409,9 +387,9 @@ def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads): if options.only_verify_expectations: if options.depfile: - build_utils.WriteDepfile(options.depfile, - options.actual_file, - inputs=depfile_deps) + action_helpers.write_depfile(options.depfile, + options.actual_file, + inputs=depfile_deps) return # If we are past this point, we are going to actually create the final apk so @@ -421,12 +399,13 @@ def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads): assets, uncompressed_assets, fast_align, allow_reads=True) # Targets generally do not depend on apks, so no need for only_if_changed. - with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f: + with action_helpers.atomic_output(options.output_apk, + only_if_changed=False) as f: with zipfile.ZipFile(options.resource_apk) as resource_apk, \ zipfile.ZipFile(f, 'w') as out_apk: def add_to_zip(zip_path, data, compress=True, alignment=4): - zipalign.AddToZipHermetic( + zip_helpers.add_to_zip_hermetic( out_apk, zip_path, data=data, @@ -473,13 +452,6 @@ def copy_resource(zipinfo, out_dir=''): dex_zip.read(dex), compress=not options.uncompress_dex) - if options.jdk_libs_dex_file: - with open(options.jdk_libs_dex_file, 'rb') as dex_file_obj: - add_to_zip( - apk_dex_dir + 'classes{}.dex'.format(max_dex_number + 1), - dex_file_obj.read(), - compress=not options.uncompress_dex) - # 4. Native libraries. 
logging.debug('Adding lib/') _AddFiles(out_apk, libs_to_add) @@ -538,7 +510,7 @@ def copy_resource(zipinfo, out_dir=''): add_to_zip(apk_root_dir + apk_path, java_resource_jar.read(apk_path)) - if options.format == 'apk': + if options.format == 'apk' and options.key_path: zipalign_path = None if fast_align else options.zipalign_path finalize_apk.FinalizeApk(options.apksigner_jar, zipalign_path, @@ -552,9 +524,9 @@ def copy_resource(zipinfo, out_dir=''): logging.debug('Moving file into place') if options.depfile: - build_utils.WriteDepfile(options.depfile, - options.output_apk, - inputs=depfile_deps) + action_helpers.write_depfile(options.depfile, + options.output_apk, + inputs=depfile_deps) if __name__ == '__main__': diff --git a/build/android/gyp/apkbuilder.pydeps b/build/android/gyp/apkbuilder.pydeps index e6122edd2f1e..28dfdb035496 100644 --- a/build/android/gyp/apkbuilder.pydeps +++ b/build/android/gyp/apkbuilder.pydeps @@ -1,9 +1,10 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/apkbuilder.pydeps build/android/gyp/apkbuilder.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py apkbuilder.py finalize_apk.py util/__init__.py util/build_utils.py util/diff_utils.py -util/zipalign.py diff --git a/build/android/gyp/assert_static_initializers.py b/build/android/gyp/assert_static_initializers.py index a6f74ba5f5c2..fd0bb02ac80b 100755 --- a/build/android/gyp/assert_static_initializers.py +++ b/build/android/gyp/assert_static_initializers.py @@ -1,11 +1,10 @@ #!/usr/bin/env python3 -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Checks the number of static initializers in an APK's library.""" -from __future__ import print_function import argparse import os @@ -23,8 +22,9 @@ def _RunReadelf(so_path, options, tool_prefix=''): - return subprocess.check_output([tool_prefix + 'readelf'] + options + - [so_path]).decode('utf8') + return subprocess.check_output( + [tool_prefix + 'readobj', '--elf-output-style=GNU'] + options + + [so_path]).decode('utf8') def _ParseLibBuildId(so_path, tool_prefix): @@ -40,26 +40,16 @@ def _VerifyLibBuildIdsMatch(tool_prefix, *so_files): 'Your output directory is likely stale.') -def _GetStaticInitializers(so_path, tool_prefix): - output = subprocess.check_output( - [_DUMP_STATIC_INITIALIZERS_PATH, '-d', so_path, '-t', tool_prefix]) - summary = re.search(r'Found \d+ static initializers in (\d+) files.', output) - return output.splitlines()[:-1], int(summary.group(1)) - - -def _PrintDumpSIsCount(apk_so_name, unzipped_so, out_dir, tool_prefix): - lib_name = os.path.basename(apk_so_name).replace('crazy.', '') - so_with_symbols_path = os.path.join(out_dir, 'lib.unstripped', lib_name) +def _DumpStaticInitializers(apk_so_name, unzipped_so, out_dir, tool_prefix): + so_with_symbols_path = os.path.join(out_dir, 'lib.unstripped', + os.path.basename(apk_so_name)) if not os.path.exists(so_with_symbols_path): - raise Exception('Unstripped .so not found. Looked here: %s', + raise Exception('Unstripped .so not found. 
Looked here: %s' % so_with_symbols_path) _VerifyLibBuildIdsMatch(tool_prefix, unzipped_so, so_with_symbols_path) - sis, _ = _GetStaticInitializers(so_with_symbols_path, tool_prefix) - for si in sis: - print(si) + subprocess.check_call([_DUMP_STATIC_INITIALIZERS_PATH, so_with_symbols_path]) -# Mostly copied from //infra/scripts/legacy/scripts/slave/chromium/sizes.py. def _ReadInitArray(so_path, tool_prefix, expect_no_initializers): stdout = _RunReadelf(so_path, ['-SW'], tool_prefix) # Matches: .init_array INIT_ARRAY 000000000516add0 5169dd0 000010 00 WA 0 0 8 @@ -68,9 +58,8 @@ def _ReadInitArray(so_path, tool_prefix, expect_no_initializers): if match: raise Exception( 'Expected no initializers for %s, yet some were found' % so_path) - else: - return 0 - elif not match: + return 0 + if not match: raise Exception('Did not find section: .init_array in {}:\n{}'.format( so_path, stdout)) size_str = re.split(r'\W+', match.group(0))[5] @@ -92,13 +81,12 @@ def _CountStaticInitializers(so_path, tool_prefix, expect_no_initializers): # NOTE: this is very implementation-specific and makes assumptions # about how compiler and linker implement global static initializers. init_array_size = _ReadInitArray(so_path, tool_prefix, expect_no_initializers) - return init_array_size / word_size + assert init_array_size % word_size == 0 + return init_array_size // word_size def _AnalyzeStaticInitializers(apk_or_aab, tool_prefix, dump_sis, out_dir, ignored_libs, no_initializers_libs): - # Static initializer counting mostly copies logic in - # infra/scripts/legacy/scripts/slave/chromium/sizes.py. with zipfile.ZipFile(apk_or_aab) as z: so_files = [ f for f in z.infolist() if f.filename.endswith('.so') @@ -127,10 +115,7 @@ def _AnalyzeStaticInitializers(apk_or_aab, tool_prefix, dump_sis, out_dir, si_count += _CountStaticInitializers(temp.name, tool_prefix, expect_no_initializers) if dump_sis: - # Print count and list of SIs reported by dump-static-initializers.py. - # Doesn't work well on all archs (particularly arm), which is why - # the readelf method is used for tracking SI counts. - _PrintDumpSIsCount(f.filename, temp.name, out_dir, tool_prefix) + _DumpStaticInitializers(f.filename, temp.name, out_dir, tool_prefix) return si_count @@ -164,18 +149,16 @@ def main(): print('You have removed one or more static initializers. 
Thanks!') print('To fix the build, update the expectation in:') print(' //chrome/android/static_initializers.gni') - else: - print('Dumping static initializers via dump-static-initializers.py:') - sys.stdout.flush() - _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix, True, '.', - ignored_libs, no_initializers_libs) print() - print('If the above list is not useful, consider listing them with:') - print(' //tools/binary_size/diagnose_bloat.py') - print() - print('For more information:') - print(' https://chromium.googlesource.com/chromium/src/+/main/docs/' - 'static_initializers.md') + + print('Dumping static initializers via dump-static-initializers.py:') + sys.stdout.flush() + _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix, True, '.', + ignored_libs, no_initializers_libs) + print() + print('For more information:') + print(' https://chromium.googlesource.com/chromium/src/+/main/docs/' + 'static_initializers.md') sys.exit(1) if args.touch: diff --git a/build/android/gyp/binary_baseline_profile.py b/build/android/gyp/binary_baseline_profile.py new file mode 100755 index 000000000000..40498050a61d --- /dev/null +++ b/build/android/gyp/binary_baseline_profile.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Creates a binary profile from an HRF + dex + mapping.""" + +import argparse +import sys + +from util import build_utils +import action_helpers + + +def main(args): + parser = argparse.ArgumentParser(description=__doc__) + action_helpers.add_depfile_arg(parser) + parser.add_argument('--output-profile', + required=True, + help='Path to output binary profile.') + parser.add_argument('--output-metadata', + required=True, + help='Path to output binary profile metadata.') + parser.add_argument('--profgen', + required=True, + help='Path to profgen binary.') + parser.add_argument('--dex', + required=True, + help='Path to a zip containing release dex files.') + parser.add_argument('--proguard-mapping', + required=True, + help='Path to proguard mapping for release dex.') + parser.add_argument('--input-profile-path', + required=True, + help='Path to HRF baseline profile to apply.') + options = parser.parse_args(build_utils.ExpandFileArgs(args)) + + cmd = [ + options.profgen, + 'bin', + options.input_profile_path, + '-o', + options.output_profile, + '-om', + options.output_metadata, + '-a', + options.dex, + '-m', + options.proguard_mapping, + ] + build_utils.CheckOutput(cmd, env={'JAVA_HOME': build_utils.JAVA_HOME}) + action_helpers.write_depfile(options.depfile, + options.output_profile, + inputs=[options.dex]) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/binary_baseline_profile.pydeps b/build/android/gyp/binary_baseline_profile.pydeps new file mode 100644 index 000000000000..944f6abed8a8 --- /dev/null +++ b/build/android/gyp/binary_baseline_profile.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/binary_baseline_profile.pydeps build/android/gyp/binary_baseline_profile.py +../../action_helpers.py +../../gn_helpers.py +binary_baseline_profile.py +util/__init__.py +util/build_utils.py diff --git a/build/android/gyp/bundletool.py b/build/android/gyp/bundletool.py index dc9b86ae97bc..79151335ce37 100755 --- a/build/android/gyp/bundletool.py +++ b/build/android/gyp/bundletool.py @@ -1,5 +1,5 @@ #!/usr/bin/env 
python3 -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,6 +8,8 @@ Bundletool is distributed as a versioned jar file. This script abstracts the location and version of this jar file, as well as the JVM invokation.""" +# Warning: Check if still being run as python2: https://crbug.com/1322618 + import logging import os import sys @@ -19,18 +21,13 @@ __file__, '..', '..', '..', '..', 'third_party', 'android_build_tools', 'bundletool')) -BUNDLETOOL_VERSION = '1.4.0' - -BUNDLETOOL_JAR_PATH = os.path.join( - BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION) +BUNDLETOOL_JAR_PATH = os.path.join(BUNDLETOOL_DIR, 'bundletool.jar') -def RunBundleTool(args, warnings_as_errors=(), print_stdout=False): - # Use () instead of None because command-line flags are None by default. - verify = warnings_as_errors == () or warnings_as_errors +def RunBundleTool(args, print_stdout=False): # ASAN builds failed with the default of 1GB (crbug.com/1120202). # Bug for bundletool: https://issuetracker.google.com/issues/165911616 - cmd = build_utils.JavaCmd(verify, xmx='4G') + cmd = build_utils.JavaCmd(xmx='4G') cmd += ['-jar', BUNDLETOOL_JAR_PATH] cmd += args logging.debug(' '.join(cmd)) diff --git a/build/android/gyp/bytecode_processor.py b/build/android/gyp/bytecode_processor.py index d77f159d8267..f6065dbe7154 100755 --- a/build/android/gyp/bytecode_processor.py +++ b/build/android/gyp/bytecode_processor.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,8 +8,10 @@ import argparse import sys +import javac_output_processor from util import build_utils from util import server_utils +import action_helpers # build_utils adds //build to sys.path. 
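# Context for the many ParseGnList -> action_helpers.parse_gn_list renames
# in this patch (including bytecode_processor.py below): the helper turns a
# GN list literal passed on the command line (e.g. '["a.jar", "b.jar"]')
# into a Python list. A rough stand-in using ast.literal_eval; the real
# helper lives in //build/action_helpers.py and is more forgiving about
# input shapes:
import ast

def parse_gn_list(value):
  if value is None:
    return []  # Unset flags default to None.
  if isinstance(value, list):
    # Flags declared with action='append' arrive as a list of GN lists.
    return [item for v in value for item in parse_gn_list(v)]
  return ast.literal_eval(value)

assert parse_gn_list(None) == []
assert parse_gn_list('["a.jar", "b.jar"]') == ['a.jar', 'b.jar']
assert parse_gn_list(['["a"]', '["b"]']) == ['a', 'b']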
def _AddSwitch(parser, val): @@ -21,6 +23,9 @@ def main(argv): argv = build_utils.ExpandFileArgs(argv[1:]) parser = argparse.ArgumentParser() parser.add_argument('--target-name', help='Fully qualified GN target name.') + parser.add_argument('--use-build-server', + action='store_true', + help='Always use the build server.') parser.add_argument('--script', required=True, help='Path to the java binary wrapper script.') parser.add_argument('--gn-target', required=True) @@ -40,16 +45,19 @@ def main(argv): if server_utils.MaybeRunCommand(name=args.target_name, argv=sys.argv, - stamp_file=args.stamp): + stamp_file=args.stamp, + force=args.use_build_server): return - args.sdk_classpath_jars = build_utils.ParseGnList(args.sdk_classpath_jars) - args.direct_classpath_jars = build_utils.ParseGnList( + args.sdk_classpath_jars = action_helpers.parse_gn_list( + args.sdk_classpath_jars) + args.direct_classpath_jars = action_helpers.parse_gn_list( args.direct_classpath_jars) - args.full_classpath_jars = build_utils.ParseGnList(args.full_classpath_jars) - args.full_classpath_gn_targets = build_utils.ParseGnList( + args.full_classpath_jars = action_helpers.parse_gn_list( + args.full_classpath_jars) + args.full_classpath_gn_targets = action_helpers.parse_gn_list( args.full_classpath_gn_targets) - args.missing_classes_allowlist = build_utils.ParseGnList( + args.missing_classes_allowlist = action_helpers.parse_gn_list( args.missing_classes_allowlist) verbose = '--verbose' if args.verbose else '--not-verbose' @@ -64,11 +72,20 @@ def main(argv): cmd += [str(len(args.full_classpath_jars))] cmd += args.full_classpath_jars cmd += [str(len(args.full_classpath_gn_targets))] - cmd += args.full_classpath_gn_targets - build_utils.CheckOutput(cmd, - print_stdout=True, - fail_func=None, - fail_on_output=args.warnings_as_errors) + cmd += [ + javac_output_processor.ReplaceGmsPackageIfNeeded(t) + for t in args.full_classpath_gn_targets + ] + try: + build_utils.CheckOutput(cmd, + print_stdout=True, + fail_func=None, + fail_on_output=args.warnings_as_errors) + except build_utils.CalledProcessError as e: + # Do not output command line because it is massive and makes the actual + # error message hard to find. 
+ sys.stderr.write(e.output) + sys.exit(1) if args.stamp: build_utils.Touch(args.stamp) diff --git a/build/android/gyp/bytecode_processor.pydeps b/build/android/gyp/bytecode_processor.pydeps index 6105d934da11..e7f1d98bdfdb 100644 --- a/build/android/gyp/bytecode_processor.pydeps +++ b/build/android/gyp/bytecode_processor.pydeps @@ -1,7 +1,28 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_processor.pydeps build/android/gyp/bytecode_processor.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../third_party/colorama/src/colorama/__init__.py +../../../third_party/colorama/src/colorama/ansi.py +../../../third_party/colorama/src/colorama/ansitowin32.py +../../../third_party/colorama/src/colorama/initialise.py +../../../third_party/colorama/src/colorama/win32.py +../../../third_party/colorama/src/colorama/winterm.py +../../../tools/android/modularization/convenience/lookup_dep.py +../../action_helpers.py ../../gn_helpers.py +../list_java_targets.py +../pylib/__init__.py +../pylib/constants/__init__.py bytecode_processor.py +javac_output_processor.py util/__init__.py util/build_utils.py util/server_utils.py diff --git a/build/android/gyp/bytecode_rewriter.py b/build/android/gyp/bytecode_rewriter.py index ad232df038cd..d16fee5237e4 100755 --- a/build/android/gyp/bytecode_rewriter.py +++ b/build/android/gyp/bytecode_rewriter.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Wrapper script around ByteCodeRewriter subclass scripts.""" @@ -8,12 +8,13 @@ import sys from util import build_utils +import action_helpers # build_utils adds //build to sys.path. 
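# The build_utils.WriteDepfile -> action_helpers.write_depfile renames
# throughout this patch (aidl.py, apkbuilder.py, bytecode_rewriter.py below)
# all produce the same artifact: a Makefile-style depfile that lets ninja
# learn about inputs discovered only while the action runs. A minimal
# sketch of that format; the real helper in //build/action_helpers.py also
# handles escaping:
def write_depfile(depfile_path, first_output, inputs):
  # ninja expects a single "output: input input ..." rule.
  with open(depfile_path, 'w') as f:
    f.write('{}: {}\n'.format(first_output, ' '.join(inputs)))

write_depfile('obj/foo.d', 'obj/foo.jar', ['a.jar', 'b.jar'])
# obj/foo.d now reads: obj/foo.jar: a.jar b.jar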
def main(argv): argv = build_utils.ExpandFileArgs(argv[1:]) parser = argparse.ArgumentParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_argument('--script', required=True, help='Path to the java binary wrapper script.') @@ -22,8 +23,8 @@ def main(argv): parser.add_argument('--output-jar', required=True) args = parser.parse_args(argv) - classpath = build_utils.ParseGnList(args.classpath) - build_utils.WriteDepfile(args.depfile, args.output_jar, inputs=classpath) + classpath = action_helpers.parse_gn_list(args.classpath) + action_helpers.write_depfile(args.depfile, args.output_jar, inputs=classpath) classpath.append(args.input_jar) cmd = [ diff --git a/build/android/gyp/bytecode_rewriter.pydeps b/build/android/gyp/bytecode_rewriter.pydeps index b8f304a78362..b0a656036738 100644 --- a/build/android/gyp/bytecode_rewriter.pydeps +++ b/build/android/gyp/bytecode_rewriter.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_rewriter.pydeps build/android/gyp/bytecode_rewriter.py +../../action_helpers.py ../../gn_helpers.py bytecode_rewriter.py util/__init__.py diff --git a/build/android/gyp/check_flag_expectations.py b/build/android/gyp/check_flag_expectations.py index 22da211f3634..97be53d864ee 100755 --- a/build/android/gyp/check_flag_expectations.py +++ b/build/android/gyp/check_flag_expectations.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/gyp/check_flag_expectations.pydeps b/build/android/gyp/check_flag_expectations.pydeps index d8c394a04c13..6bade9490b5c 100644 --- a/build/android/gyp/check_flag_expectations.pydeps +++ b/build/android/gyp/check_flag_expectations.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/check_flag_expectations.pydeps build/android/gyp/check_flag_expectations.py +../../action_helpers.py ../../gn_helpers.py check_flag_expectations.py util/__init__.py diff --git a/build/android/gyp/compile_java.py b/build/android/gyp/compile_java.py index 2a92842edec0..5fee0d77c3ef 100755 --- a/build/android/gyp/compile_java.py +++ b/build/android/gyp/compile_java.py @@ -1,9 +1,10 @@ #!/usr/bin/env python3 # -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +import functools import logging import multiprocessing import optparse @@ -13,16 +14,15 @@ import sys import time import zipfile +import pathlib +import javac_output_processor from util import build_utils from util import md5_check from util import jar_info_utils from util import server_utils - -sys.path.insert( - 0, - os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')) -import colorama +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers _JAVAC_EXTRACTOR = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'android_prebuilts', 'build_tools', 'common', @@ -34,6 +34,9 @@ # Full list of checks: https://errorprone.info/bugpatterns ERRORPRONE_WARNINGS_TO_DISABLE = [ + # Temporarily disabling to roll doubledown. + # TODO(wnwen): Re-enable this upstream. 
+ 'InlineMeInliner', # The following are super useful, but existing issues need to be fixed first # before they can start failing the build on new errors. 'InvalidParam', @@ -47,6 +50,21 @@ 'UnescapedEntity', 'NonCanonicalType', 'AlmostJavadoc', + 'ReturnValueIgnored', + # The following are added for errorprone update: https://crbug.com/1216032 + 'InlineMeSuggester', + 'DoNotClaimAnnotations', + 'JavaUtilDate', + 'IdentityHashMapUsage', + 'UnnecessaryMethodReference', + 'LongFloatConversion', + 'CharacterGetNumericValue', + 'ErroneousThreadPoolConstructorChecker', + 'StaticMockMember', + 'MissingSuperCall', + 'ToStringReturnsNull', + # If possible, this should be automatically fixed if turned on: + 'MalformedInlineTag', # TODO(crbug.com/834807): Follow steps in bug 'DoubleBraceInitialization', # TODO(crbug.com/834790): Follow steps in bug. @@ -62,6 +80,8 @@ # Android platform default is always UTF-8. # https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset() 'DefaultCharset', + # Low priority since there are lots of tags that don't fit this check. + 'UnrecognisedJavadocTag', # Low priority since the alternatives still work. 'JdkObsolete', # We don't use that many lambdas. @@ -167,6 +187,11 @@ 'RemoveUnusedImports', # We do not care about unnecessary parenthesis enough to check for them. 'UnnecessaryParentheses', + # The only time we trigger this is when it is better to be explicit in a + # list of unicode characters, e.g. FindAddress.java + 'UnicodeEscape', + # Nice to have. + 'AlreadyChecked', ] # Full list of checks: https://errorprone.info/bugpatterns @@ -179,7 +204,6 @@ 'InvalidThrows', 'LongLiteralLowerCaseSuffix', 'MultiVariableDeclaration', - 'ParameterNotNullable', 'RedundantOverride', 'StaticQualifiedUsingExpression', 'StringEquality', @@ -190,14 +214,7 @@ ] -def ProcessJavacOutput(output): - fileline_prefix = r'(?P(?P[-.\w/\\]+.java):(?P[0-9]+):)' - warning_re = re.compile(fileline_prefix + - r'(?P warning: (?P.*))$') - error_re = re.compile(fileline_prefix + - r'(?P (?P.*))$') - marker_re = re.compile(r'\s*(?P\^)\s*$') - +def ProcessJavacOutput(output, target_name): # These warnings cannot be suppressed even for third party code. Deprecation # warnings especially do not help since we must support older android version. deprecated_re = re.compile( @@ -208,17 +225,6 @@ def ProcessJavacOutput(output): activity_re = re.compile(r'^(?P\s*location: )class Activity$') - warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM] - error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT] - marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT] - - def Colorize(line, regex, color): - match = regex.match(line) - start = match.start(color[0]) - end = match.end(color[0]) - return (line[:start] + color[1] + line[start:end] + colorama.Fore.RESET + - colorama.Style.RESET_ALL + line[end:]) - def ApplyFilters(line): return not (deprecated_re.match(line) or unchecked_re.match(line) or recompile_re.match(line)) @@ -230,31 +236,73 @@ def Elaborate(line): line, prefix, 'docs/ui/android/bytecode_rewriting.md') return line - def ApplyColors(line): - if warning_re.match(line): - line = Colorize(line, warning_re, warning_color) - elif error_re.match(line): - line = Colorize(line, error_re, error_color) - elif marker_re.match(line): - line = Colorize(line, marker_re, marker_color) - return line + output = build_utils.FilterReflectiveAccessJavaWarnings(output) + + # Warning currently cannot be silenced via javac flag. 
+ if 'Unsafe is internal proprietary API' in output: + # Example: + # HiddenApiBypass.java:69: warning: Unsafe is internal proprietary API and + # may be removed in a future release + # import sun.misc.Unsafe; + # ^ + output = re.sub(r'.*?Unsafe is internal proprietary API[\s\S]*?\^\n', '', + output) + output = re.sub(r'\d+ warnings\n', '', output) lines = (l for l in output.split('\n') if ApplyFilters(l)) - lines = (ApplyColors(Elaborate(l)) for l in lines) + lines = (Elaborate(l) for l in lines) + + output_processor = javac_output_processor.JavacOutputProcessor(target_name) + lines = output_processor.Process(lines) + return '\n'.join(lines) -def _ParsePackageAndClassNames(java_file): +def CreateJarFile(jar_path, + classes_dir, + service_provider_configuration_dir=None, + additional_jar_files=None, + extra_classes_jar=None): + """Zips files from compilation into a single jar.""" + logging.info('Start creating jar file: %s', jar_path) + with action_helpers.atomic_output(jar_path) as f: + with zipfile.ZipFile(f.name, 'w') as z: + zip_helpers.zip_directory(z, classes_dir) + if service_provider_configuration_dir: + config_files = build_utils.FindInDirectory( + service_provider_configuration_dir) + for config_file in config_files: + zip_path = os.path.relpath(config_file, + service_provider_configuration_dir) + zip_helpers.add_to_zip_hermetic(z, zip_path, src_path=config_file) + + if additional_jar_files: + for src_path, zip_path in additional_jar_files: + zip_helpers.add_to_zip_hermetic(z, zip_path, src_path=src_path) + if extra_classes_jar: + path_transform = lambda p: p if p.endswith('.class') else None + zip_helpers.merge_zips(z, [extra_classes_jar], + path_transform=path_transform) + logging.info('Completed jar file: %s', jar_path) + + +def _ParsePackageAndClassNames(source_file): + """This should support both Java and Kotlin files.""" package_name = '' class_names = [] - with open(java_file) as f: + with open(source_file) as f: for l in f: # Strip unindented comments. # Considers a leading * as a continuation of a multi-line comment (our # linter doesn't enforce a space before it like there should be). l = re.sub(r'^(?://.*|/?\*.*?(?:\*/\s*|$))', '', l) + # Stripping things between double quotes (strings), so if the word "class" + # shows up in a string this doesn't trigger. This isn't strictly correct + # (with escaped quotes) but covers a very large percentage of cases. + l = re.sub('(?:".*?")', '', l) - m = re.match(r'package\s+(.*?);', l) + # Java lines end in semicolon, whereas Kotlin lines do not. 
+ m = re.match(r'package\s+(.*?)(;|\s*$)', l) if m and not package_name: package_name = m.group(1) @@ -266,12 +314,12 @@ def _ParsePackageAndClassNames(java_file): return package_name, class_names -def _ProcessJavaFileForInfo(java_file): - package_name, class_names = _ParsePackageAndClassNames(java_file) - return java_file, package_name, class_names +def _ProcessSourceFileForInfo(source_file): + package_name, class_names = _ParsePackageAndClassNames(source_file) + return source_file, package_name, class_names -class _InfoFileContext(object): +class _InfoFileContext: """Manages the creation of the class->source file .info file.""" def __init__(self, chromium_code, excluded_globs): @@ -291,23 +339,29 @@ def AddSrcJarSources(self, srcjar_path, extracted_paths, parent_dir): self._srcjar_files[path] = '{}/{}'.format( srcjar_path, os.path.relpath(path, parent_dir)) - def SubmitFiles(self, java_files): + def SubmitFiles(self, source_files): + if not source_files: + return if self._pool is None: # Restrict to just one process to not slow down compiling. Compiling # is always slower. self._pool = multiprocessing.Pool(1) - logging.info('Submitting %d files for info', len(java_files)) + logging.info('Submitting %d files for info', len(source_files)) self._results.append( - self._pool.imap_unordered( - _ProcessJavaFileForInfo, java_files, chunksize=1000)) - - def _CheckPathMatchesClassName(self, java_file, package_name, class_name): - parts = package_name.split('.') + [class_name + '.java'] - expected_path_suffix = os.path.sep.join(parts) - if not java_file.endswith(expected_path_suffix): - raise Exception(('Java package+class name do not match its path.\n' + self._pool.imap_unordered(_ProcessSourceFileForInfo, + source_files, + chunksize=1000)) + + def _CheckPathMatchesClassName(self, source_file, package_name, class_name): + if source_file.endswith('.java'): + parts = package_name.split('.') + [class_name + '.java'] + else: + parts = package_name.split('.') + [class_name + '.kt'] + expected_suffix = os.path.sep.join(parts) + if not source_file.endswith(expected_suffix): + raise Exception(('Source package+class name do not match its path.\n' 'Actual path: %s\nExpected path: %s') % - (java_file, expected_path_suffix)) + (source_file, expected_suffix)) def _ProcessInfo(self, java_file, package_name, class_names, source): for class_name in class_names: @@ -336,10 +390,9 @@ def _Collect(self): class_names, source): if self._ShouldIncludeInJarInfo(fully_qualified_name): ret[fully_qualified_name] = java_file - self._pool.terminate() return ret - def __del__(self): + def Close(self): # Work around for Python 2.x bug with multiprocessing and daemon threads: # https://bugs.python.org/issue4106 if self._pool is not None: @@ -358,32 +411,12 @@ def Commit(self, output_path): entries = self._Collect() logging.info('Writing info file: %s', output_path) - with build_utils.AtomicOutput(output_path, mode='wb') as f: + with action_helpers.atomic_output(output_path, mode='wb') as f: jar_info_utils.WriteJarInfoFile(f, entries, self._srcjar_files) logging.info('Completed info file: %s', output_path) -def _CreateJarFile(jar_path, service_provider_configuration_dir, - additional_jar_files, classes_dir): - logging.info('Start creating jar file: %s', jar_path) - with build_utils.AtomicOutput(jar_path) as f: - with zipfile.ZipFile(f.name, 'w') as z: - build_utils.ZipDir(z, classes_dir) - if service_provider_configuration_dir: - config_files = build_utils.FindInDirectory( - service_provider_configuration_dir) - for config_file 
in config_files: - zip_path = os.path.relpath(config_file, - service_provider_configuration_dir) - build_utils.AddToZipHermetic(z, zip_path, src_path=config_file) - - if additional_jar_files: - for src_path, zip_path in additional_jar_files: - build_utils.AddToZipHermetic(z, zip_path, src_path=src_path) - logging.info('Completed jar file: %s', jar_path) - - -def _OnStaleMd5(options, javac_cmd, javac_args, java_files): +def _OnStaleMd5(changes, options, javac_cmd, javac_args, java_files, kt_files): logging.info('Starting _OnStaleMd5') if options.enable_kythe_annotations: # Kythe requires those env variables to be set and compile_java.py does the @@ -394,68 +427,146 @@ def _OnStaleMd5(options, javac_cmd, javac_args, java_files): 'KYTHE_ROOT_DIRECTORY and KYTHE_OUTPUT_DIRECTORY ' 'environment variables to be set.') javac_extractor_cmd = build_utils.JavaCmd() + [ + '--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED', + '--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED', + '--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED', + '--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED', + '--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED', '-jar', _JAVAC_EXTRACTOR, ] try: - _RunCompiler(options, javac_extractor_cmd + javac_args, java_files, - options.classpath, options.jar_path + '.javac_extractor', - save_outputs=False), + # _RunCompiler()'s partial javac implementation does not support + # generating outputs in $KYTHE_OUTPUT_DIRECTORY. + _RunCompiler(changes, + options, + javac_extractor_cmd + javac_args, + java_files, + options.jar_path + '.javac_extractor', + enable_partial_javac=False) except build_utils.CalledProcessError as e: # Having no index for particular target is better than failing entire # codesearch. Log and error and move on. logging.error('Could not generate kzip: %s', e) + intermediates_out_dir = None + jar_info_path = None + if not options.enable_errorprone: + # Delete any stale files in the generated directory. The purpose of + # options.generated_dir is for codesearch. + shutil.rmtree(options.generated_dir, True) + intermediates_out_dir = options.generated_dir + + jar_info_path = options.jar_path + '.info' + # Compiles with Error Prone take twice as long to run as pure javac. Thus GN # rules run both in parallel, with Error Prone only used for checks. - _RunCompiler(options, - javac_cmd + javac_args, - java_files, - options.classpath, - options.jar_path, - save_outputs=not options.enable_errorprone) + try: + _RunCompiler(changes, + options, + javac_cmd + javac_args, + java_files, + options.jar_path, + kt_files=kt_files, + jar_info_path=jar_info_path, + intermediates_out_dir=intermediates_out_dir, + enable_partial_javac=True) + except build_utils.CalledProcessError as e: + # Do not output stacktrace as it takes up space on gerrit UI, forcing + # you to click though to find the actual compilation error. It's never + # interesting to see the Python stacktrace for a Java compilation error. + sys.stderr.write(e.output) + sys.exit(1) + logging.info('Completed all steps in _OnStaleMd5') -def _RunCompiler(options, javac_cmd, java_files, classpath, jar_path, - save_outputs=True): +def _RunCompiler(changes, + options, + javac_cmd, + java_files, + jar_path, + kt_files=None, + jar_info_path=None, + intermediates_out_dir=None, + enable_partial_javac=False): + """Runs java compiler. + + Args: + changes: md5_check.Changes object. + options: Object with command line flags. + javac_cmd: Command to execute. 
+ java_files: List of java files passed from command line. + jar_path: Path of output jar file. + kt_files: List of Kotlin files passed from command line if any. + jar_info_path: Path of the .info file to generate. + If None, .info file will not be generated. + intermediates_out_dir: Directory for saving intermediate outputs. + If None a temporary directory is used. + enable_partial_javac: Enables compiling only Java files which have changed + in the special case that no method signatures have changed. This is + useful for large GN targets. + Not supported if compiling generates outputs other than |jar_path| and + |jar_info_path|. + """ logging.info('Starting _RunCompiler') - # Compiles with Error Prone take twice as long to run as pure javac. Thus GN - # rules run both in parallel, with Error Prone only used for checks. - save_outputs = not options.enable_errorprone + java_files = java_files.copy() + java_srcjars = options.java_srcjars + save_info_file = jar_info_path is not None # Use jar_path's directory to ensure paths are relative (needed for goma). temp_dir = jar_path + '.staging' - shutil.rmtree(temp_dir, True) + build_utils.DeleteDirectory(temp_dir) os.makedirs(temp_dir) + info_file_context = None try: classes_dir = os.path.join(temp_dir, 'classes') service_provider_configuration = os.path.join( temp_dir, 'service_provider_configuration') - if save_outputs: - input_srcjars_dir = os.path.join(options.generated_dir, 'input_srcjars') - annotation_processor_outputs_dir = os.path.join( - options.generated_dir, 'annotation_processor_outputs') - # Delete any stale files in the generated directory. The purpose of - # options.generated_dir is for codesearch. - shutil.rmtree(options.generated_dir, True) + if java_files: + os.makedirs(classes_dir) + + if enable_partial_javac: + all_changed_paths_are_java = all( + p.endswith(".java") for p in changes.IterChangedPaths()) + if (all_changed_paths_are_java and not changes.HasStringChanges() + and os.path.exists(jar_path) + and (jar_info_path is None or os.path.exists(jar_info_path))): + # Log message is used by tests to determine whether partial javac + # optimization was used. + logging.info('Using partial javac optimization for %s compile' % + (jar_path)) + + # Header jar corresponding to |java_files| did not change. + # As a build speed optimization (crbug.com/1170778), re-compile only + # java files which have changed. Re-use old jar .info file. + java_files = list(changes.IterChangedPaths()) + java_srcjars = None + + # Reuse old .info file. 
+ save_info_file = False + + build_utils.ExtractAll(jar_path, classes_dir, pattern='*.class') + + if save_info_file: info_file_context = _InfoFileContext(options.chromium_code, options.jar_info_exclude_globs) - else: - input_srcjars_dir = os.path.join(temp_dir, 'input_srcjars') - annotation_processor_outputs_dir = os.path.join( - temp_dir, 'annotation_processor_outputs') - if options.java_srcjars: + if intermediates_out_dir is None: + intermediates_out_dir = temp_dir + + input_srcjars_dir = os.path.join(intermediates_out_dir, 'input_srcjars') + + if java_srcjars: logging.info('Extracting srcjars to %s', input_srcjars_dir) build_utils.MakeDirectory(input_srcjars_dir) for srcjar in options.java_srcjars: extracted_files = build_utils.ExtractAll( srcjar, no_clobber=True, path=input_srcjars_dir, pattern='*.java') java_files.extend(extracted_files) - if save_outputs: + if save_info_file: info_file_context.AddSrcJarSources(srcjar, extracted_files, input_srcjars_dir) logging.info('Done extracting srcjars') @@ -470,22 +581,18 @@ def _RunCompiler(options, javac_cmd, java_files, classpath, jar_path, pattern='META-INF/services/*') logging.info('Done extracting service provider configs') - if save_outputs and java_files: + if save_info_file and java_files: info_file_context.SubmitFiles(java_files) + info_file_context.SubmitFiles(kt_files) if java_files: # Don't include the output directory in the initial set of args since it # being in a temp dir makes it unstable (breaks md5 stamping). cmd = list(javac_cmd) - os.makedirs(classes_dir) cmd += ['-d', classes_dir] - if options.processors: - os.makedirs(annotation_processor_outputs_dir) - cmd += ['-s', annotation_processor_outputs_dir] - - if classpath: - cmd += ['-classpath', ':'.join(classpath)] + if options.classpath: + cmd += ['-classpath', ':'.join(options.classpath)] # Pass source paths as response files to avoid extremely long command # lines that are tedius to debug. 
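The partial-javac gate added above boils down to a small predicate over the md5_check change set. A minimal restatement, with a hypothetical changed_paths/has_string_changes pair standing in for the real util/md5_check.py Changes object:

import os

def can_use_partial_javac(changed_paths, has_string_changes, jar_path,
                          jar_info_path):
  # Only .java sources changed, no flag/string changes, and the previous
  # jar (and .info file, when one is expected) still exist on disk.
  return (all(p.endswith('.java') for p in changed_paths)
          and not has_string_changes
          and os.path.exists(jar_path)
          and (jar_info_path is None or os.path.exists(jar_info_path)))

When the predicate holds, _RunCompiler extracts the previous .class files from jar_path and feeds javac only the changed sources, reusing the old .info file.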
@@ -494,43 +601,43 @@ def _RunCompiler(options, javac_cmd, java_files, classpath, jar_path, f.write(' '.join(java_files)) cmd += ['@' + java_files_rsp_path] + process_javac_output_partial = functools.partial( + ProcessJavacOutput, target_name=options.target_name) + logging.debug('Build command %s', cmd) start = time.time() build_utils.CheckOutput(cmd, print_stdout=options.chromium_code, - stdout_filter=ProcessJavacOutput, - stderr_filter=ProcessJavacOutput, + stdout_filter=process_javac_output_partial, + stderr_filter=process_javac_output_partial, fail_on_output=options.warnings_as_errors) end = time.time() - start logging.info('Java compilation took %ss', end) - if save_outputs: - if options.processors: - annotation_processor_java_files = build_utils.FindInDirectory( - annotation_processor_outputs_dir) - if annotation_processor_java_files: - info_file_context.SubmitFiles(annotation_processor_java_files) + CreateJarFile(jar_path, classes_dir, service_provider_configuration, + options.additional_jar_files, options.kotlin_jar_path) - _CreateJarFile(jar_path, service_provider_configuration, - options.additional_jar_files, classes_dir) - - info_file_context.Commit(jar_path + '.info') - else: - build_utils.Touch(jar_path) + if save_info_file: + info_file_context.Commit(jar_info_path) logging.info('Completed all steps in _RunCompiler') finally: + if info_file_context: + info_file_context.Close() shutil.rmtree(temp_dir) def _ParseOptions(argv): parser = optparse.OptionParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_option('--target-name', help='Fully qualified GN target name.') parser.add_option('--skip-build-server', action='store_true', help='Avoid using the build server.') + parser.add_option('--use-build-server', + action='store_true', + help='Always use the build server.') parser.add_option( '--java-srcjars', action='append', @@ -540,20 +647,7 @@ def _ParseOptions(argv): '--generated-dir', help='Subdirectory within target_gen_dir to place extracted srcjars and ' 'annotation processor output for codesearch to find.') - parser.add_option( - '--bootclasspath', - action='append', - default=[], - help='Boot classpath for javac. If this is specified multiple times, ' - 'they will all be appended to construct the classpath.') - parser.add_option( - '--java-version', - help='Java language version to use in -source and -target args to javac.') parser.add_option('--classpath', action='append', help='Classpath to use.') - parser.add_option( - '--processors', - action='append', - help='GN list of annotation processor main classes.') parser.add_option( '--processorpath', action='append', @@ -606,16 +700,18 @@ def _ParseOptions(argv): '--header-jar', help='This is the header jar for the current target that contains ' 'META-INF/services/* files to be included in the output jar.') + parser.add_option( + '--kotlin-jar-path', + help='Kotlin jar to be merged into the output jar. 
This contains the ' + ".class files from this target's .kt files.") options, args = parser.parse_args(argv) build_utils.CheckOptions(options, parser, required=('jar_path', )) - options.bootclasspath = build_utils.ParseGnList(options.bootclasspath) - options.classpath = build_utils.ParseGnList(options.classpath) - options.processorpath = build_utils.ParseGnList(options.processorpath) - options.processors = build_utils.ParseGnList(options.processors) - options.java_srcjars = build_utils.ParseGnList(options.java_srcjars) - options.jar_info_exclude_globs = build_utils.ParseGnList( + options.classpath = action_helpers.parse_gn_list(options.classpath) + options.processorpath = action_helpers.parse_gn_list(options.processorpath) + options.java_srcjars = action_helpers.parse_gn_list(options.java_srcjars) + options.jar_info_exclude_globs = action_helpers.parse_gn_list( options.jar_info_exclude_globs) additional_jar_files = [] @@ -624,30 +720,38 @@ def _ParseOptions(argv): additional_jar_files.append((filepath, jar_filepath)) options.additional_jar_files = additional_jar_files - java_files = [] + files = [] for arg in args: # Interpret a path prefixed with @ as a file containing a list of sources. if arg.startswith('@'): - java_files.extend(build_utils.ReadSourcesList(arg[1:])) + files.extend(build_utils.ReadSourcesList(arg[1:])) else: - java_files.append(arg) + files.append(arg) + + # The target's .sources file contains both Java and Kotlin files. We use + # compile_kt.py to compile the Kotlin files to .class and header jars. Javac + # is run only on .java files. + java_files = [f for f in files if f.endswith('.java')] + # Kotlin files are needed to populate the info file and attribute size in + # supersize back to the appropriate Kotlin file. + kt_files = [f for f in files if f.endswith('.kt')] - return options, java_files + return options, java_files, kt_files def main(argv): build_utils.InitLogging('JAVAC_DEBUG') argv = build_utils.ExpandFileArgs(argv) - options, java_files = _ParseOptions(argv) + options, java_files, kt_files = _ParseOptions(argv) # Only use the build server for errorprone runs. if (options.enable_errorprone and not options.skip_build_server and server_utils.MaybeRunCommand(name=options.target_name, argv=sys.argv, - stamp_file=options.jar_path)): + stamp_file=options.jar_path, + force=options.use_build_server)): return - colorama.init() javac_cmd = [] if options.gomacc_path: javac_cmd.append(options.gomacc_path) @@ -655,6 +759,10 @@ def main(argv): javac_args = [ '-g', + # We currently target JDK 11 everywhere, since Mockito is broken by JDK17. + # See crbug.com/1409661 for more details. + '--release', + '11', # Chromium only allows UTF8 source files. Being explicit avoids # javac pulling a default encoding from the user's environment. '-encoding', @@ -663,6 +771,9 @@ def main(argv): # See: http://blog.ltgt.net/most-build-tools-misuse-javac/ '-sourcepath', ':', + # protobuf-generated files fail this check (javadoc has @deprecated, + # but method missing @Deprecated annotation). 
+ '-Xlint:-dep-ann', ] if options.enable_errorprone: @@ -684,6 +795,22 @@ def main(argv): '-XepPatchChecks:,' + ','.join(ERRORPRONE_CHECKS_TO_APPLY) ] + # These are required to use JDK 16, and are taken directly from + # https://errorprone.info/docs/installation + javac_args += [ + '-J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED', + '-J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED', + '-J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED', + '-J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED', + '-J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED', + '-J--add-exports=jdk.compiler/com.sun.tools.javac.processing=' + 'ALL-UNNAMED', + '-J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED', + '-J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED', + '-J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED', + '-J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED', + ] + javac_args += ['-XDcompilePolicy=simple', ' '.join(errorprone_flags)] # This flag quits errorprone after checks and before code generation, since @@ -692,28 +819,11 @@ def main(argv): if not ERRORPRONE_CHECKS_TO_APPLY: javac_args += ['-XDshould-stop.ifNoError=FLOW'] - if options.java_version: - javac_args.extend([ - '-source', - options.java_version, - '-target', - options.java_version, - ]) - if options.java_version == '1.8': - # Android's boot jar doesn't contain all java 8 classes. - options.bootclasspath.append(build_utils.RT_JAR_PATH) - - if options.processors: - javac_args.extend(['-processor', ','.join(options.processors)]) - else: - # This effectively disables all annotation processors, even including - # annotation processors in service provider configuration files named - # META-INF/. See the following link for reference: - # https://docs.oracle.com/en/java/javase/11/tools/javac.html - javac_args.extend(['-proc:none']) - - if options.bootclasspath: - javac_args.extend(['-bootclasspath', ':'.join(options.bootclasspath)]) + # This effectively disables all annotation processors, even including + # annotation processors in service provider configuration files named + # META-INF/. See the following link for reference: + # https://docs.oracle.com/en/java/javase/11/tools/javac.html + javac_args.extend(['-proc:none']) if options.processorpath: javac_args.extend(['-processorpath', ':'.join(options.processorpath)]) @@ -723,12 +833,11 @@ def main(argv): javac_args.extend(options.javac_arg) - classpath_inputs = ( - options.bootclasspath + options.classpath + options.processorpath) + classpath_inputs = options.classpath + options.processorpath depfile_deps = classpath_inputs # Files that are already inputs in GN should go in input_paths. 
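The block of -J--add-exports/-J--add-opens flags above is taken verbatim from the Error Prone installation docs; equivalently, it could be generated from two package lists. A hedged sketch only (the package names are copied from the flags above; the generator itself is not part of the change):

_EXPORTS = ['api', 'file', 'main', 'model', 'parser', 'processing', 'tree',
            'util']
_OPENS = ['code', 'comp']

def errorprone_jvm_flags():
  # The -J prefix makes javac pass the flag through to its own JVM.
  flags = [
      '-J--add-exports=jdk.compiler/com.sun.tools.javac.%s=ALL-UNNAMED' % p
      for p in _EXPORTS
  ]
  flags += [
      '-J--add-opens=jdk.compiler/com.sun.tools.javac.%s=ALL-UNNAMED' % p
      for p in _OPENS
  ]
  return flags

print('\n'.join(errorprone_jvm_flags()))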
- input_paths = depfile_deps + options.java_srcjars + java_files + input_paths = depfile_deps + options.java_srcjars + java_files + kt_files if options.header_jar: input_paths.append(options.header_jar) input_paths += [x[0] for x in options.additional_jar_files] @@ -737,19 +846,19 @@ def main(argv): if not options.enable_errorprone: output_paths += [options.jar_path + '.info'] - input_strings = javac_cmd + javac_args + options.classpath + java_files + [ - options.warnings_as_errors, options.jar_info_exclude_globs - ] - - # Keep md5_check since we plan to use its changes feature to implement a build - # speed improvement for non-signature compiles: https://crbug.com/1170778 - md5_check.CallAndWriteDepfileIfStale( - lambda: _OnStaleMd5(options, javac_cmd, javac_args, java_files), - options, - depfile_deps=depfile_deps, - input_paths=input_paths, - input_strings=input_strings, - output_paths=output_paths) + input_strings = (javac_cmd + javac_args + options.classpath + java_files + + kt_files + + [options.warnings_as_errors, options.jar_info_exclude_globs]) + + # Use md5_check for |pass_changes| feature. + md5_check.CallAndWriteDepfileIfStale(lambda changes: _OnStaleMd5( + changes, options, javac_cmd, javac_args, java_files, kt_files), + options, + depfile_deps=depfile_deps, + input_paths=input_paths, + input_strings=input_strings, + output_paths=output_paths, + pass_changes=True) if __name__ == '__main__': diff --git a/build/android/gyp/compile_java.pydeps b/build/android/gyp/compile_java.pydeps index f14fd0bf24a7..45617b15075d 100644 --- a/build/android/gyp/compile_java.pydeps +++ b/build/android/gyp/compile_java.pydeps @@ -1,14 +1,30 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_java.pydeps build/android/gyp/compile_java.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py ../../../third_party/colorama/src/colorama/__init__.py ../../../third_party/colorama/src/colorama/ansi.py ../../../third_party/colorama/src/colorama/ansitowin32.py ../../../third_party/colorama/src/colorama/initialise.py ../../../third_party/colorama/src/colorama/win32.py ../../../third_party/colorama/src/colorama/winterm.py +../../../tools/android/modularization/convenience/lookup_dep.py +../../action_helpers.py ../../gn_helpers.py ../../print_python_deps.py +../../zip_helpers.py +../list_java_targets.py +../pylib/__init__.py +../pylib/constants/__init__.py compile_java.py +javac_output_processor.py util/__init__.py util/build_utils.py util/jar_info_utils.py diff --git a/build/android/gyp/compile_kt.py b/build/android/gyp/compile_kt.py new file mode 100755 index 000000000000..4c7eb6ff8432 --- /dev/null +++ b/build/android/gyp/compile_kt.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python3 +# +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
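Before the compile_kt.py body: compile_java.py above hands its incremental behavior to md5_check.CallAndWriteDepfileIfStale, which invokes the callback only when inputs changed. The real implementation lives in util/md5_check.py; the following is only a rough model of that contract, with a simplified stamp-file scheme:

import hashlib
import os

def call_if_stale(on_stale, stamp_path, input_paths, input_strings):
  # Hash the identity of every input (paths, mtimes, and flag strings).
  h = hashlib.md5()
  for path in sorted(input_paths):
    h.update(path.encode())
    h.update(str(os.path.getmtime(path)).encode())
  for s in input_strings:
    h.update(str(s).encode())
  digest = h.hexdigest()
  old = ''
  if os.path.exists(stamp_path):
    with open(stamp_path) as f:
      old = f.read()
  if digest != old:
    on_stale()  # Do the expensive compile only when something changed.
    with open(stamp_path, 'w') as f:
      f.write(digest)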
+
+import argparse
+import logging
+import os
+import shutil
+import sys
+import time
+
+import compile_java
+
+from util import build_utils
+import action_helpers  # build_utils adds //build to sys.path.
+
+
+def _RunCompiler(args,
+                 kotlinc_cmd,
+                 source_files,
+                 jar_path,
+                 intermediates_out_dir=None):
+  """Runs the Kotlin compiler."""
+  logging.info('Starting _RunCompiler')
+
+  source_files = source_files.copy()
+  kt_files = [f for f in source_files if f.endswith('.kt')]
+  assert len(kt_files) > 0, 'At least one .kt file must be passed in.'
+
+  java_srcjars = args.java_srcjars
+
+  # Use jar_path's directory to ensure paths are relative (needed for goma).
+  temp_dir = jar_path + '.staging'
+  build_utils.DeleteDirectory(temp_dir)
+  os.makedirs(temp_dir)
+  try:
+    classes_dir = os.path.join(temp_dir, 'classes')
+    os.makedirs(classes_dir)
+
+    input_srcjars_dir = os.path.join(intermediates_out_dir or temp_dir,
+                                     'input_srcjars')
+
+    if java_srcjars:
+      logging.info('Extracting srcjars to %s', input_srcjars_dir)
+      build_utils.MakeDirectory(input_srcjars_dir)
+      for srcjar in args.java_srcjars:
+        source_files += build_utils.ExtractAll(srcjar,
+                                               no_clobber=True,
+                                               path=input_srcjars_dir,
+                                               pattern='*.java')
+      logging.info('Done extracting srcjars')
+
+    # Don't include the output directory in the initial set of args since it
+    # being in a temp dir makes it unstable (breaks md5 stamping).
+    cmd = list(kotlinc_cmd)
+    cmd += ['-d', classes_dir]
+
+    if args.classpath:
+      cmd += ['-classpath', ':'.join(args.classpath)]
+
+    # This is a kotlinc plugin to generate header files for .kt files,
+    # similar to turbine for .java files.
+    jvm_abi_path = os.path.join(build_utils.KOTLIN_HOME, 'lib',
+                                'jvm-abi-gen.jar')
+    cmd += [
+        f'-Xplugin={jvm_abi_path}', '-P',
+        'plugin:org.jetbrains.kotlin.jvm.abi:outputDir=' +
+        args.interface_jar_path
+    ]
+
+    # Pass source paths as response files to avoid extremely long command
+    # lines that are tedious to debug.
+    source_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
+    with open(source_files_rsp_path, 'w') as f:
+      f.write(' '.join(source_files))
+    cmd += ['@' + source_files_rsp_path]
+
+    # Explicitly set JAVA_HOME since some bots do not have this already set.
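Two idioms from _RunCompiler above are worth calling out: the @response-file trick for long source lists, and the copied-environment JAVA_HOME override performed just below. A condensed, hypothetical sketch of both together (not the Chromium helper itself):

import os
import subprocess
import tempfile

def run_with_rsp(cmd, source_files, java_home):
  # Long argv lists go into a response file referenced as '@<path>'.
  with tempfile.NamedTemporaryFile('w', suffix='.rsp', delete=False) as f:
    f.write(' '.join(source_files))
    rsp_path = f.name
  env = os.environ.copy()
  env['JAVA_HOME'] = java_home  # Some environments do not set this.
  try:
    subprocess.run(cmd + ['@' + rsp_path], env=env, check=True)
  finally:
    os.unlink(rsp_path)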
+ env = os.environ.copy() + env['JAVA_HOME'] = build_utils.JAVA_HOME + + logging.debug('Build command %s', cmd) + start = time.time() + build_utils.CheckOutput(cmd, + env=env, + print_stdout=args.chromium_code, + fail_on_output=args.warnings_as_errors) + logging.info('Kotlin compilation took %ss', time.time() - start) + + compile_java.CreateJarFile(jar_path, classes_dir) + + logging.info('Completed all steps in _RunCompiler') + finally: + shutil.rmtree(temp_dir) + + +def _ParseOptions(argv): + parser = argparse.ArgumentParser() + action_helpers.add_depfile_arg(parser) + + parser.add_argument('--java-srcjars', + action='append', + default=[], + help='List of srcjars to include in compilation.') + parser.add_argument( + '--generated-dir', + help='Subdirectory within target_gen_dir to place extracted srcjars and ' + 'annotation processor output for codesearch to find.') + parser.add_argument('--classpath', action='append', help='Classpath to use.') + parser.add_argument( + '--chromium-code', + action='store_true', + help='Whether code being compiled should be built with stricter ' + 'warnings for chromium code.') + parser.add_argument('--gomacc-path', + help='When set, prefix kotlinc command with gomacc') + parser.add_argument('--warnings-as-errors', + action='store_true', + help='Treat all warnings as errors.') + parser.add_argument('--jar-path', help='Jar output path.', required=True) + parser.add_argument('--interface-jar-path', + help='Interface jar output path.', + required=True) + + args, extra_args = parser.parse_known_args(argv) + + args.classpath = action_helpers.parse_gn_list(args.classpath) + args.java_srcjars = action_helpers.parse_gn_list(args.java_srcjars) + + source_files = [] + for arg in extra_args: + # Interpret a path prefixed with @ as a file containing a list of sources. + if arg.startswith('@'): + source_files.extend(build_utils.ReadSourcesList(arg[1:])) + else: + assert not arg.startswith('--'), f'Undefined option {arg}' + source_files.append(arg) + + return args, source_files + + +def main(argv): + build_utils.InitLogging('KOTLINC_DEBUG') + argv = build_utils.ExpandFileArgs(argv) + args, source_files = _ParseOptions(argv) + + kotlinc_cmd = [] + if args.gomacc_path: + kotlinc_cmd.append(args.gomacc_path) + kotlinc_cmd.append(build_utils.KOTLINC_PATH) + + kotlinc_cmd += [ + '-no-jdk', # Avoid depending on the bundled JDK. + # Avoid depending on the bundled Kotlin stdlib. This may have a version + # skew with the one in //third_party/android_deps (which is the one we + # prefer to use). + '-no-stdlib', + # Avoid depending on the bundled Kotlin reflect libs. + '-no-reflect', + ] + + if args.generated_dir: + # Delete any stale files in the generated directory. The purpose of + # args.generated_dir is for codesearch. + shutil.rmtree(args.generated_dir, True) + + _RunCompiler(args, + kotlinc_cmd, + source_files, + args.jar_path, + intermediates_out_dir=args.generated_dir) + + if args.depfile: + # GN already knows of the source files, so avoid listing individual files + # in the depfile. 
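For context on the write_depfile call that follows: a depfile is a one-rule Makefile fragment that Ninja reads to learn implicit dependencies, and listing only the classpath keeps it small because GN already tracks the source files. A minimal sketch of such a writer (not the real action_helpers implementation):

def write_depfile(depfile_path, first_output, deps):
  # Ninja accepts a single 'output: dep dep ...' rule per depfile.
  with open(depfile_path, 'w') as f:
    f.write('%s: %s\n' % (first_output, ' '.join(sorted(deps))))

write_depfile('foo.jar.d', 'obj/foo.jar', ['lib/a.jar', 'lib/b.jar'])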
+ action_helpers.write_depfile(args.depfile, args.jar_path, args.classpath) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/compile_kt.pydeps b/build/android/gyp/compile_kt.pydeps new file mode 100644 index 000000000000..818bca802ed6 --- /dev/null +++ b/build/android/gyp/compile_kt.pydeps @@ -0,0 +1,33 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_kt.pydeps build/android/gyp/compile_kt.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../third_party/colorama/src/colorama/__init__.py +../../../third_party/colorama/src/colorama/ansi.py +../../../third_party/colorama/src/colorama/ansitowin32.py +../../../third_party/colorama/src/colorama/initialise.py +../../../third_party/colorama/src/colorama/win32.py +../../../third_party/colorama/src/colorama/winterm.py +../../../tools/android/modularization/convenience/lookup_dep.py +../../action_helpers.py +../../gn_helpers.py +../../print_python_deps.py +../../zip_helpers.py +../list_java_targets.py +../pylib/__init__.py +../pylib/constants/__init__.py +compile_java.py +compile_kt.py +javac_output_processor.py +util/__init__.py +util/build_utils.py +util/jar_info_utils.py +util/md5_check.py +util/server_utils.py diff --git a/build/android/gyp/compile_resources.py b/build/android/gyp/compile_resources.py index 8a668e732ed3..3b1fe7300488 100755 --- a/build/android/gyp/compile_resources.py +++ b/build/android/gyp/compile_resources.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -19,13 +19,12 @@ import hashlib import logging import os +import pathlib import re import shutil import subprocess import sys -import tempfile import textwrap -import zipfile from xml.etree import ElementTree from util import build_utils @@ -34,6 +33,8 @@ from util import parallel from util import protoresources from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers # Pngs that we shouldn't convert to webp. Please add rationale when updating. @@ -53,8 +54,24 @@ def _ParseArgs(args): Returns: An options object as from argparse.ArgumentParser.parse_args() """ - parser, input_opts, output_opts = resource_utils.ResourceArgsParser() + parser = argparse.ArgumentParser(description=__doc__) + input_opts = parser.add_argument_group('Input options') + output_opts = parser.add_argument_group('Output options') + + input_opts.add_argument('--include-resources', + action='append', + required=True, + help='Paths to arsc resource files used to link ' + 'against. Can be specified multiple times.') + input_opts.add_argument( + '--dependencies-res-zips', + default=[], + help='Resources zip archives from dependents. 
Required to ' + 'resolve @type/foo references into dependent libraries.') + input_opts.add_argument( + '--extra-res-packages', + help='Additional package names to generate R.java files for.') input_opts.add_argument( '--aapt2-path', required=True, help='Path to the Android aapt2 tool.') input_opts.add_argument( @@ -76,31 +93,25 @@ def _ParseArgs(args): action='store_true', help='Same as --shared-resources, but also ensures all resource IDs are ' 'directly usable from the APK loaded as an application.') - input_opts.add_argument( '--package-id', type=int, help='Decimal integer representing custom package ID for resources ' '(instead of 127==0x7f). Cannot be used with --shared-resources.') - input_opts.add_argument( '--package-name', help='Package name that will be used to create R class.') - input_opts.add_argument( '--rename-manifest-package', help='Package name to force AAPT to use.') - input_opts.add_argument( '--arsc-package-name', help='Package name to set in manifest of resources.arsc file. This is ' 'only used for apks under test.') - input_opts.add_argument( '--shared-resources-allowlist', help='An R.txt file acting as a allowlist for resources that should be ' 'non-final and have their package ID changed at runtime in R.java. ' 'Implies and overrides --shared-resources.') - input_opts.add_argument( '--shared-resources-allowlist-locales', default='[]', @@ -108,28 +119,13 @@ def _ParseArgs(args): ' to this locale list will be kept in the final output for the ' 'resources identified through --shared-resources-allowlist, even ' 'if --locale-allowlist is being used.') - input_opts.add_argument( '--use-resource-ids-path', help='Use resource IDs generated by aapt --emit-ids.') - - input_opts.add_argument( - '--extra-main-r-text-files', - help='Additional R.txt files that will be added to the root R.java file, ' - 'but not packaged in the generated resources.arsc. If these resources ' - 'entries contain duplicate resources with the generated R.txt file, they ' - 'must be identical.') - - input_opts.add_argument( - '--support-zh-hk', - action='store_true', - help='Use zh-rTW resources for zh-rHK.') - input_opts.add_argument( '--debuggable', action='store_true', help='Whether to add android:debuggable="true".') - input_opts.add_argument('--version-code', help='Version code for apk.') input_opts.add_argument('--version-name', help='Version name for apk.') input_opts.add_argument( @@ -143,7 +139,6 @@ def _ParseArgs(args): help="android:maxSdkVersion expected in AndroidManifest.xml.") input_opts.add_argument( '--manifest-package', help='Package name of the AndroidManifest.xml.') - input_opts.add_argument( '--locale-allowlist', default='[]', @@ -159,17 +154,14 @@ def _ParseArgs(args): default='[]', help='GN list of globs that say which files to include even ' 'when --resource-exclusion-regex is set.') - input_opts.add_argument( '--dependencies-res-zip-overlays', help='GN list with subset of --dependencies-res-zips to use overlay ' 'semantics for.') - input_opts.add_argument( '--values-filter-rules', help='GN list of source_glob:regex for filtering resources after they ' 'are compiled. 
Use this to filter out entries within values/ files.') - input_opts.add_argument('--png-to-webp', action='store_true', help='Convert png files to webp format.') @@ -177,105 +169,61 @@ def _ParseArgs(args): help='Path to the cwebp binary.') input_opts.add_argument( '--webp-cache-dir', help='The directory to store webp image cache.') - input_opts.add_argument( - '--no-xml-namespaces', + '--is-bundle-module', action='store_true', - help='Whether to strip xml namespaces from processed xml resources.') + help='Whether resources are being generated for a bundle module.') input_opts.add_argument( - '--short-resource-paths', - action='store_true', - help='Whether to shorten resource paths inside the apk or module.') + '--uses-split', + help='Value to set uses-split to in the AndroidManifest.xml.') input_opts.add_argument( - '--strip-resource-names', - action='store_true', - help='Whether to strip resource names from the resource table of the apk ' - 'or module.') + '--verification-version-code-offset', + help='Subtract this from versionCode for expectation files') + input_opts.add_argument( + '--verification-library-version-offset', + help='Subtract this from static-library version for expectation files') + action_helpers.add_depfile_arg(output_opts) output_opts.add_argument('--arsc-path', help='Apk output for arsc format.') output_opts.add_argument('--proto-path', help='Apk output for proto format.') - group = input_opts.add_mutually_exclusive_group() - group.add_argument( - '--optimized-arsc-path', - help='Output for `aapt2 optimize` for arsc format (enables the step).') - group.add_argument( - '--optimized-proto-path', - help='Output for `aapt2 optimize` for proto format (enables the step).') - input_opts.add_argument( - '--resources-config-paths', - default='[]', - help='GN list of paths to aapt2 resources config files.') - output_opts.add_argument( '--info-path', help='Path to output info file for the partial apk.') - output_opts.add_argument( '--srcjar-out', - required=True, help='Path to srcjar to contain generated R.java.') - output_opts.add_argument('--r-text-out', help='Path to store the generated R.txt file.') - output_opts.add_argument( '--proguard-file', help='Path to proguard.txt generated file.') - output_opts.add_argument( '--proguard-file-main-dex', help='Path to proguard.txt generated file for main dex.') - output_opts.add_argument( '--emit-ids-out', help='Path to file produced by aapt2 --emit-ids.') - output_opts.add_argument( - '--resources-path-map-out-path', - help='Path to file produced by aapt2 that maps original resource paths ' - 'to shortened resource paths inside the apk or module.') - - input_opts.add_argument( - '--is-bundle-module', - action='store_true', - help='Whether resources are being generated for a bundle module.') - - input_opts.add_argument( - '--uses-split', - help='Value to set uses-split to in the AndroidManifest.xml.') - - input_opts.add_argument( - '--extra-verification-manifest', - help='Path to AndroidManifest.xml which should be merged into base ' - 'manifest when performing verification.') - diff_utils.AddCommandLineFlags(parser) options = parser.parse_args(args) - resource_utils.HandleCommonOptions(options) - - options.locale_allowlist = build_utils.ParseGnList(options.locale_allowlist) - options.shared_resources_allowlist_locales = build_utils.ParseGnList( + options.include_resources = action_helpers.parse_gn_list( + options.include_resources) + options.dependencies_res_zips = action_helpers.parse_gn_list( + options.dependencies_res_zips) + 
options.extra_res_packages = action_helpers.parse_gn_list( + options.extra_res_packages) + options.locale_allowlist = action_helpers.parse_gn_list( + options.locale_allowlist) + options.shared_resources_allowlist_locales = action_helpers.parse_gn_list( options.shared_resources_allowlist_locales) - options.resource_exclusion_exceptions = build_utils.ParseGnList( + options.resource_exclusion_exceptions = action_helpers.parse_gn_list( options.resource_exclusion_exceptions) - options.dependencies_res_zip_overlays = build_utils.ParseGnList( + options.dependencies_res_zip_overlays = action_helpers.parse_gn_list( options.dependencies_res_zip_overlays) - options.values_filter_rules = build_utils.ParseGnList( + options.values_filter_rules = action_helpers.parse_gn_list( options.values_filter_rules) - options.extra_main_r_text_files = build_utils.ParseGnList( - options.extra_main_r_text_files) - options.resources_config_paths = build_utils.ParseGnList( - options.resources_config_paths) - - if options.optimized_proto_path and not options.proto_path: - # We could write to a temp file, but it's simpler to require it. - parser.error('--optimized-proto-path requires --proto-path') if not options.arsc_path and not options.proto_path: parser.error('One of --arsc-path or --proto-path is required.') - if options.resources_path_map_out_path and not options.short_resource_paths: - parser.error( - '--resources-path-map-out-path requires --short-resource-paths') - if options.package_id and options.shared_resources: parser.error('--package-id and --shared-resources are mutually exclusive') @@ -288,20 +236,6 @@ def _IterFiles(root_dir): yield os.path.join(root, f) -def _DuplicateZhResources(resource_dirs, path_info): - """Duplicate Taiwanese resources into Hong-Kong specific directory.""" - for resource_dir in resource_dirs: - # We use zh-TW resources for zh-HK (if we have zh-TW resources). - for path in _IterFiles(resource_dir): - if 'zh-rTW' in path: - hk_path = path.replace('zh-rTW', 'zh-rHK') - build_utils.MakeDirectory(os.path.dirname(hk_path)) - shutil.copyfile(path, hk_path) - path_info.RegisterRename( - os.path.relpath(path, resource_dir), - os.path.relpath(hk_path, resource_dir)) - - def _RenameLocaleResourceDirs(resource_dirs, path_info): """Rename locale resource directories into standard names when necessary. @@ -324,15 +258,12 @@ def _RenameLocaleResourceDirs(resource_dirs, path_info): * BCP 47 langauge tags will be renamed to an equivalent ISO 639-1 locale qualifier if possible (e.g. 'values-b+en+US/ -> values-en-rUS'). - Though this is not necessary at the moment, because no third-party - package that Chromium links against uses these for the current list of - supported locales, this may change when the list is extended in the - future). Args: resource_dirs: list of top-level resource directories. """ for resource_dir in resource_dirs: + ignore_dirs = {} for path in _IterFiles(resource_dir): locale = resource_utils.FindLocaleInStringResourceFilePath(path) if not locale: @@ -346,10 +277,24 @@ def _RenameLocaleResourceDirs(resource_dirs, path_info): if path == path2: raise Exception('Could not substitute locale %s for %s in %s' % (locale, locale2, path)) - if os.path.exists(path2): - # This happens sometimes, e.g. some libraries provide both - # values-nb/ and values-no/ with the same content. + + # Ignore rather than rename when the destination resources config + # already exists. + # e.g. some libraries provide both values-nb/ and values-no/. + # e.g. 
material design provides: + # * res/values-rUS/values-rUS.xml + # * res/values-b+es+419/values-b+es+419.xml + config_dir = os.path.dirname(path2) + already_has_renamed_config = ignore_dirs.get(config_dir) + if already_has_renamed_config is None: + # Cache the result of the first time the directory is encountered + # since subsequent encounters will find the directory already exists + # (due to the rename). + already_has_renamed_config = os.path.exists(config_dir) + ignore_dirs[config_dir] = already_has_renamed_config + if already_has_renamed_config: continue + build_utils.MakeDirectory(os.path.dirname(path2)) shutil.move(path, path2) path_info.RegisterRename( @@ -357,13 +302,11 @@ def _RenameLocaleResourceDirs(resource_dirs, path_info): os.path.relpath(path2, resource_dir)) -def _ToAndroidLocales(locale_allowlist, support_zh_hk): +def _ToAndroidLocales(locale_allowlist): """Converts the list of Chrome locales to Android config locale qualifiers. Args: locale_allowlist: A list of Chromium locale names. - support_zh_hk: True if we need to support zh-HK by duplicating - the zh-TW strings. Returns: A set of matching Android config locale qualifier names. """ @@ -377,14 +320,7 @@ def _ToAndroidLocales(locale_allowlist, support_zh_hk): language = locale.split('-')[0] ret.add(language) - # We don't actually support zh-HK in Chrome on Android, but we mimic the - # native side behavior where we use zh-TW resources when the locale is set to - # zh-HK. See https://crbug.com/780847. - if support_zh_hk: - assert not any('HK' in l for l in locale_allowlist), ( - 'Remove special logic if zh-HK is now supported (crbug.com/780847).') - ret.add('zh-rHK') - return set(ret) + return ret def _MoveImagesToNonMdpiFolders(res_root, path_info): @@ -416,7 +352,37 @@ def _MoveImagesToNonMdpiFolders(res_root, path_info): os.path.relpath(dst_file, res_root)) -def _FixManifest(options, temp_dir, extra_manifest=None): +def _DeterminePlatformVersion(aapt2_path, jar_candidates): + def maybe_extract_version(j): + try: + return resource_utils.ExtractBinaryManifestValues(aapt2_path, j) + except build_utils.CalledProcessError: + return None + + def is_sdk_jar(jar_name): + if jar_name in ('android.jar', 'android_system.jar'): + return True + # Robolectric jar looks a bit different. + return 'android-all' in jar_name and 'robolectric' in jar_name + + android_sdk_jars = [ + j for j in jar_candidates if is_sdk_jar(os.path.basename(j)) + ] + extract_all = [maybe_extract_version(j) for j in android_sdk_jars] + extract_all = [x for x in extract_all if x] + if len(extract_all) == 0: + raise Exception( + 'Unable to find android SDK jar among candidates: %s' + % ', '.join(android_sdk_jars)) + if len(extract_all) > 1: + raise Exception( + 'Found multiple android SDK jars among candidates: %s' + % ', '.join(android_sdk_jars)) + platform_version_code, platform_version_name = extract_all.pop()[:2] + return platform_version_code, platform_version_name + + +def _FixManifest(options, temp_dir): """Fix the APK's AndroidManifest.xml. This adds any missing namespaces for 'android' and 'tools', and @@ -426,74 +392,40 @@ def _FixManifest(options, temp_dir, extra_manifest=None): Args: options: The command-line arguments tuple. temp_dir: A temporary directory where the fixed manifest will be written to. - extra_manifest: Path to an AndroidManifest.xml file which will get merged - into the application node of the base manifest. Returns: Tuple of: * Manifest path within |temp_dir|. * Original package_name. + * Manifest package name. 
""" - def maybe_extract_version(j): - try: - return resource_utils.ExtractBinaryManifestValues(options.aapt2_path, j) - except build_utils.CalledProcessError: - return None - - android_sdk_jars = [j for j in options.include_resources - if os.path.basename(j) in ('android.jar', - 'android_system.jar')] - extract_all = [maybe_extract_version(j) for j in android_sdk_jars] - successful_extractions = [x for x in extract_all if x] - if len(successful_extractions) == 0: - raise Exception( - 'Unable to find android SDK jar among candidates: %s' - % ', '.join(android_sdk_jars)) - elif len(successful_extractions) > 1: - raise Exception( - 'Found multiple android SDK jars among candidates: %s' - % ', '.join(android_sdk_jars)) - version_code, version_name = successful_extractions.pop()[:2] - - debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml') doc, manifest_node, app_node = manifest_utils.ParseManifest( options.android_manifest) - if extra_manifest: - _, extra_manifest_node, extra_app_node = manifest_utils.ParseManifest( - extra_manifest) - for node in extra_app_node: - app_node.append(node) - for node in extra_manifest_node: - # DFM manifests have a bunch of tags we don't care about inside - # , so only take . - if node.tag == 'queries': - manifest_node.append(node) - - manifest_utils.AssertUsesSdk(manifest_node, options.min_sdk_version, - options.target_sdk_version) - # We explicitly check that maxSdkVersion is set in the manifest since we don't - # add it later like minSdkVersion and targetSdkVersion. - manifest_utils.AssertUsesSdk( - manifest_node, - max_sdk_version=options.max_sdk_version, - fail_if_not_exist=True) - manifest_utils.AssertPackage(manifest_node, options.manifest_package) - - manifest_node.set('platformBuildVersionCode', version_code) - manifest_node.set('platformBuildVersionName', version_name) - - orig_package = manifest_node.get('package') - if options.arsc_package_name: - manifest_node.set('package', options.arsc_package_name) - + # merge_manifest.py also sets package & . We may want to ensure + # manifest merger is always enabled and remove these command-line arguments. + manifest_utils.SetUsesSdk(manifest_node, options.target_sdk_version, + options.min_sdk_version, options.max_sdk_version) + orig_package = manifest_node.get('package') or options.manifest_package + fixed_package = (options.arsc_package_name or options.manifest_package + or orig_package) + manifest_node.set('package', fixed_package) + + platform_version_code, platform_version_name = _DeterminePlatformVersion( + options.aapt2_path, options.include_resources) + manifest_node.set('platformBuildVersionCode', platform_version_code) + manifest_node.set('platformBuildVersionName', platform_version_name) + if options.version_code: + manifest_utils.NamespacedSet(manifest_node, 'versionCode', + options.version_code) + if options.version_name: + manifest_utils.NamespacedSet(manifest_node, 'versionName', + options.version_name) if options.debuggable: - app_node.set('{%s}%s' % (manifest_utils.ANDROID_NAMESPACE, 'debuggable'), - 'true') + manifest_utils.NamespacedSet(app_node, 'debuggable', 'true') if options.uses_split: uses_split = ElementTree.SubElement(manifest_node, 'uses-split') - uses_split.set('{%s}name' % manifest_utils.ANDROID_NAMESPACE, - options.uses_split) + manifest_utils.NamespacedSet(uses_split, 'name', options.uses_split) # Make sure the min-sdk condition is not less than the min-sdk of the bundle. 
for min_sdk_node in manifest_node.iter('{%s}min-sdk' % @@ -502,8 +434,9 @@ def maybe_extract_version(j): if int(min_sdk_node.get(dist_value)) < int(options.min_sdk_version): min_sdk_node.set(dist_value, options.min_sdk_version) + debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml') manifest_utils.SaveManifest(doc, debug_manifest_path) - return debug_manifest_path, orig_package + return debug_manifest_path, orig_package, fixed_package def _CreateKeepPredicate(resource_exclusion_regex, @@ -710,8 +643,7 @@ def _RemoveUnwantedLocalizedStrings(dep_subdirs, options): # list provided by --locale-allowlist. wanted_locales = all_locales if options.locale_allowlist: - wanted_locales = _ToAndroidLocales(options.locale_allowlist, - options.support_zh_hk) + wanted_locales = _ToAndroidLocales(options.locale_allowlist) # Set B: shared resources locales, which is either set A # or the list provided by --shared-resources-allowlist-locales @@ -723,7 +655,7 @@ def _RemoveUnwantedLocalizedStrings(dep_subdirs, options): options.shared_resources_allowlist)) shared_resources_locales = _ToAndroidLocales( - options.shared_resources_allowlist_locales, options.support_zh_hk) + options.shared_resources_allowlist_locales) # Remove any file that belongs to a locale not covered by # either A or B. @@ -783,8 +715,6 @@ def _PackageApk(options, build): logging.debug('Applying locale transformations') path_info = resource_utils.ResourceInfoFile() - if options.support_zh_hk: - _DuplicateZhResources(dep_subdirs, path_info) _RenameLocaleResourceDirs(dep_subdirs, path_info) logging.debug('Applying file-based exclusions') @@ -816,19 +746,12 @@ def _PackageApk(options, build): 'link', '--auto-add-overlay', '--no-version-vectors', - # Set SDK versions in case they are not set in the Android manifest. - '--min-sdk-version', - options.min_sdk_version, - '--target-sdk-version', - options.target_sdk_version, + '--output-text-symbols', + build.r_txt_path, ] for j in options.include_resources: link_command += ['-I', j] - if options.version_code: - link_command += ['--version-code', options.version_code] - if options.version_name: - link_command += ['--version-name', options.version_name] if options.proguard_file: link_command += ['--proguard', build.proguard_path] link_command += ['--proguard-minimal-keep-rules'] @@ -836,28 +759,24 @@ def _PackageApk(options, build): link_command += ['--proguard-main-dex', build.proguard_main_dex_path] if options.emit_ids_out: link_command += ['--emit-ids', build.emit_ids_path] - if options.r_text_in: - shutil.copyfile(options.r_text_in, build.r_txt_path) - else: - link_command += ['--output-text-symbols', build.r_txt_path] # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib # can be used with recent versions of aapt2. 
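As a reading aid for the _PackageApk hunk above, here is a stripped-down model of the aapt2 link invocation it assembles, reduced to only the flags visible in this hunk (paths are hypothetical, and the real command carries many more options):

def build_link_command(aapt2_path, include_resources, r_txt_path, out_path):
  cmd = [
      aapt2_path,
      'link',
      '--auto-add-overlay',
      '--no-version-vectors',
      '--output-text-symbols',
      r_txt_path,
  ]
  for jar in include_resources:  # e.g. the android.jar linked against.
    cmd += ['-I', jar]
  return cmd + ['-o', out_path]

print(' '.join(build_link_command('aapt2', ['android.jar'], 'R.txt', 'out.ap_')))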
if options.shared_resources: link_command.append('--shared-lib') - if options.no_xml_namespaces: + if int(options.min_sdk_version) > 21: link_command.append('--no-xml-namespaces') if options.package_id: link_command += [ '--package-id', - hex(options.package_id), + '0x%02x' % options.package_id, '--allow-reserved-package-id', ] - fixed_manifest, desired_manifest_package_name = _FixManifest( - options, build.temp_dir) + fixed_manifest, desired_manifest_package_name, fixed_manifest_package = ( + _FixManifest(options, build.temp_dir)) if options.rename_manifest_package: desired_manifest_package_name = options.rename_manifest_package @@ -866,32 +785,41 @@ def _PackageApk(options, build): desired_manifest_package_name ] - # Creates a .zip with AndroidManifest.xml, resources.arsc, res/* - # Also creates R.txt - if options.use_resource_ids_path: - _CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path, - desired_manifest_package_name) - link_command += ['--stable-ids', build.stable_ids_path] + if options.package_id is not None: + package_id = options.package_id + elif options.shared_resources: + package_id = 0 + else: + package_id = 0x7f + _CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path, + fixed_manifest_package, package_id) + link_command += ['--stable-ids', build.stable_ids_path] link_command += partials # We always create a binary arsc file first, then convert to proto, so flags # such as --shared-lib can be supported. - arsc_path = build.arsc_path - if arsc_path is None: - _, arsc_path = tempfile.mkstmp() link_command += ['-o', build.arsc_path] logging.debug('Starting: aapt2 link') link_proc = subprocess.Popen(link_command) # Create .res.info file in parallel. - _CreateResourceInfoFile(path_info, build.info_path, - options.dependencies_res_zips) - logging.debug('Created .res.info file') + if options.info_path: + logging.debug('Creating .res.info file') + _CreateResourceInfoFile(path_info, build.info_path, + options.dependencies_res_zips) exit_code = link_proc.wait() + assert exit_code == 0, f'aapt2 link cmd failed with {exit_code=}' logging.debug('Finished: aapt2 link') + + if options.shared_resources: + logging.debug('Resolving styleables in R.txt') + # Need to resolve references because unused resource removal tool does not + # support references in R.txt files. + resource_utils.ResolveStyleableReferences(build.r_txt_path) + if exit_code: raise subprocess.CalledProcessError(exit_code, link_command) @@ -902,7 +830,7 @@ def _PackageApk(options, build): # can call it in the case where the APK is being loaded as a library. 
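The switch above from hex() to '0x%02x' is not cosmetic: hex() drops the leading zero for package IDs below 0x10, while the format string always yields a two-digit byte, presumably what aapt2's --package-id parsing expects:

package_id = 2
print(hex(package_id))        # '0x2'  (no zero padding)
print('0x%02x' % package_id)  # '0x02' (always a two-digit byte)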
with open(build.proguard_path, 'a') as proguard_file: keep_rule = ''' - -keep class {package}.R {{ + -keep,allowoptimization class {package}.R {{ public static void onResourcesLoaded(int); }} '''.format(package=desired_manifest_package_name) @@ -927,120 +855,38 @@ def _PackageApk(options, build): build.arsc_path, build.proto_path ]) - if build.arsc_path is None: - os.remove(arsc_path) - - if options.optimized_proto_path: - _OptimizeApk(build.optimized_proto_path, options, build.temp_dir, - build.proto_path, build.r_txt_path) - elif options.optimized_arsc_path: - _OptimizeApk(build.optimized_arsc_path, options, build.temp_dir, - build.arsc_path, build.r_txt_path) - - return desired_manifest_package_name - - -def _CombineResourceConfigs(resources_config_paths, out_config_path): - with open(out_config_path, 'w') as out_config: - for config_path in resources_config_paths: - with open(config_path) as config: - out_config.write(config.read()) - out_config.write('\n') - - -def _OptimizeApk(output, options, temp_dir, unoptimized_path, r_txt_path): - """Optimize intermediate .ap_ file with aapt2. - - Args: - output: Path to write to. - options: The command-line options. - temp_dir: A temporary directory. - unoptimized_path: path of the apk to optimize. - r_txt_path: path to the R.txt file of the unoptimized apk. - """ - optimize_command = [ + # Sanity check that the created resources have the expected package ID. + logging.debug('Performing sanity check') + _, actual_package_id = resource_utils.ExtractArscPackage( options.aapt2_path, - 'optimize', - unoptimized_path, - '-o', - output, - ] - - # Optimize the resources.arsc file by obfuscating resource names and only - # allow usage via R.java constant. - if options.strip_resource_names: - no_collapse_resources = _ExtractNonCollapsableResources(r_txt_path) - gen_config_path = os.path.join(temp_dir, 'aapt2.config') - if options.resources_config_paths: - _CombineResourceConfigs(options.resources_config_paths, gen_config_path) - with open(gen_config_path, 'a') as config: - for resource in no_collapse_resources: - config.write('{}#no_collapse\n'.format(resource)) - - optimize_command += [ - '--collapse-resource-names', - '--resources-config-path', - gen_config_path, - ] - - if options.short_resource_paths: - optimize_command += ['--shorten-resource-paths'] - if options.resources_path_map_out_path: - optimize_command += [ - '--resource-path-shortening-map', options.resources_path_map_out_path - ] - - logging.debug('Running aapt2 optimize') - build_utils.CheckOutput( - optimize_command, print_stdout=False, print_stderr=False) - + build.arsc_path if options.arsc_path else build.proto_path) + # When there are no resources, ExtractArscPackage returns (None, None), in + # this case there is no need to check for matching package ID. + if actual_package_id is not None and actual_package_id != package_id: + raise Exception('Invalid package ID 0x%x (expected 0x%x)' % + (actual_package_id, package_id)) -def _ExtractNonCollapsableResources(rtxt_path): - """Extract resources that should not be collapsed from the R.txt file + return desired_manifest_package_name - Resources of type ID are references to UI elements/views. They are used by - UI automation testing frameworks. They are kept in so that they don't break - tests, even though they may not actually be used during runtime. See - https://crbug.com/900993 - App icons (aka mipmaps) are sometimes referenced by other apps by name so must - be keps as well. 
See https://b/161564466
-  Args:
-    rtxt_path: Path to R.txt file with all the resources
-  Returns:
-    List of resources in the form of <resource_type>/<resource_name>
-  """
-  resources = []
-  _NO_COLLAPSE_TYPES = ['id', 'mipmap']
-  with open(rtxt_path) as rtxt:
-    for line in rtxt:
-      for resource_type in _NO_COLLAPSE_TYPES:
-        if ' {} '.format(resource_type) in line:
-          resource_name = line.split()[2]
-          resources.append('{}/{}'.format(resource_type, resource_name))
-  return resources
-
-
-@contextlib.contextmanager
-def _CreateStableIdsFile(in_path, out_path, package_name):
+def _CreateStableIdsFile(in_path, out_path, package_name, package_id):
   """Transforms a file generated by --emit-ids from another package.
 
   --stable-ids is generally meant to be used by different versions of the same
   package. To make it work for other packages, we need to transform the
   package name references to match the package that resources are being
   generated for.
-
-  Note: This will fail if the package ID of the resources in
-  |options.use_resource_ids_path| does not match the package ID of the
-  resources being linked.
   """
-  with open(in_path) as stable_ids_file:
-    with open(out_path, 'w') as output_ids_file:
-      output_stable_ids = re.sub(
-          r'^.*?:',
-          package_name + ':',
-          stable_ids_file.read(),
-          flags=re.MULTILINE)
-      output_ids_file.write(output_stable_ids)
+  if in_path:
+    data = pathlib.Path(in_path).read_text()
+  else:
+    # Force IDs to use 0x01 for the type byte in order to ensure they are
+    # different from IDs generated by other apps. https://crbug.com/1293336
+    data = 'pkg:id/fake_resource_id = 0x7f010000\n'
+  # Replace "pkg:" with correct package name.
+  data = re.sub(r'^.*?:', package_name + ':', data, flags=re.MULTILINE)
+  # Replace "0x7f" with correct package id.
+  data = re.sub(r'0x..', '0x%02x' % package_id, data)
+  pathlib.Path(out_path).write_text(data)
 
 
 def _WriteOutputs(options, build):
@@ -1049,8 +895,6 @@
       (options.r_text_out, build.r_txt_path),
       (options.arsc_path, build.arsc_path),
       (options.proto_path, build.proto_path),
-      (options.optimized_arsc_path, build.optimized_arsc_path),
-      (options.optimized_proto_path, build.optimized_proto_path),
       (options.proguard_file, build.proguard_path),
      (options.proguard_file_main_dex, build.proguard_main_dex_path),
      (options.emit_ids_out, build.emit_ids_path),
@@ -1065,10 +909,11 @@
 
 def _CreateNormalizedManifestForVerification(options):
   with build_utils.TempDir() as tempdir:
-    fixed_manifest, _ = _FixManifest(
-        options, tempdir, extra_manifest=options.extra_verification_manifest)
+    fixed_manifest, _, _ = _FixManifest(options, tempdir)
     with open(fixed_manifest) as f:
-      return manifest_utils.NormalizeManifest(f.read())
+      return manifest_utils.NormalizeManifest(
+          f.read(), options.verification_version_code_offset,
+          options.verification_library_version_offset)
 
 
 def main(args):
@@ -1142,37 +987,27 @@
   # will be created in the base module.
   apk_package_name = None
 
-  logging.debug('Creating R.srcjar')
-  resource_utils.CreateRJavaFiles(
-      build.srcjar_dir, apk_package_name, build.r_txt_path,
-      options.extra_res_packages, rjava_build_options, options.srcjar_out,
-      custom_root_package_name, grandparent_custom_package_name,
-      options.extra_main_r_text_files)
-  build_utils.ZipDir(build.srcjar_path, build.srcjar_dir)
-
-  # Sanity check that the created resources have the expected package ID.
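A worked example of the two substitutions performed by the new _CreateStableIdsFile above, using hypothetical package values:

import re

data = 'com.example.other:id/toolbar = 0x7f010000\n'
data = re.sub(r'^.*?:', 'org.chromium.test:', data, flags=re.MULTILINE)
data = re.sub(r'0x..', '0x%02x' % 0x80, data)
print(data)  # org.chromium.test:id/toolbar = 0x80010000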
- logging.debug('Performing sanity check') - if options.package_id: - expected_id = options.package_id - elif options.shared_resources: - expected_id = 0 - else: - expected_id = 127 # == '0x7f'. - _, package_id = resource_utils.ExtractArscPackage( - options.aapt2_path, - build.arsc_path if options.arsc_path else build.proto_path) - if package_id != expected_id: - raise Exception( - 'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id)) + if options.srcjar_out: + logging.debug('Creating R.srcjar') + resource_utils.CreateRJavaFiles(build.srcjar_dir, apk_package_name, + build.r_txt_path, + options.extra_res_packages, + rjava_build_options, options.srcjar_out, + custom_root_package_name, + grandparent_custom_package_name) + with action_helpers.atomic_output(build.srcjar_path) as f: + zip_helpers.zip_directory(f, build.srcjar_dir) logging.debug('Copying outputs') _WriteOutputs(options, build) if options.depfile: + assert options.srcjar_out, 'Update first output below and remove assert.' depfile_deps = (options.dependencies_res_zips + options.dependencies_res_zip_overlays + - options.extra_main_r_text_files + options.include_resources) - build_utils.WriteDepfile(options.depfile, options.srcjar_out, depfile_deps) + options.include_resources) + action_helpers.write_depfile(options.depfile, options.srcjar_out, + depfile_deps) if __name__ == '__main__': diff --git a/build/android/gyp/compile_resources.pydeps b/build/android/gyp/compile_resources.pydeps index 174b52697c69..458a772c319f 100644 --- a/build/android/gyp/compile_resources.pydeps +++ b/build/android/gyp/compile_resources.pydeps @@ -1,9 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_resources.pydeps build/android/gyp/compile_resources.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/asyncfilters.py -../../../third_party/jinja2/asyncsupport.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py @@ -23,31 +22,9 @@ ../../../third_party/markupsafe/__init__.py ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py -../../../third_party/protobuf/python/google/__init__.py -../../../third_party/protobuf/python/google/protobuf/__init__.py -../../../third_party/protobuf/python/google/protobuf/descriptor.py -../../../third_party/protobuf/python/google/protobuf/descriptor_database.py -../../../third_party/protobuf/python/google/protobuf/descriptor_pool.py -../../../third_party/protobuf/python/google/protobuf/internal/__init__.py -../../../third_party/protobuf/python/google/protobuf/internal/api_implementation.py -../../../third_party/protobuf/python/google/protobuf/internal/containers.py -../../../third_party/protobuf/python/google/protobuf/internal/decoder.py -../../../third_party/protobuf/python/google/protobuf/internal/encoder.py -../../../third_party/protobuf/python/google/protobuf/internal/enum_type_wrapper.py -../../../third_party/protobuf/python/google/protobuf/internal/extension_dict.py -../../../third_party/protobuf/python/google/protobuf/internal/message_listener.py -../../../third_party/protobuf/python/google/protobuf/internal/python_message.py -../../../third_party/protobuf/python/google/protobuf/internal/type_checkers.py -../../../third_party/protobuf/python/google/protobuf/internal/well_known_types.py 
-../../../third_party/protobuf/python/google/protobuf/internal/wire_format.py -../../../third_party/protobuf/python/google/protobuf/message.py -../../../third_party/protobuf/python/google/protobuf/message_factory.py -../../../third_party/protobuf/python/google/protobuf/reflection.py -../../../third_party/protobuf/python/google/protobuf/symbol_database.py -../../../third_party/protobuf/python/google/protobuf/text_encoding.py -../../../third_party/protobuf/python/google/protobuf/text_format.py -../../../third_party/six/src/six.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py compile_resources.py proto/Configuration_pb2.py proto/Resources_pb2.py diff --git a/build/android/gyp/copy_ex.py b/build/android/gyp/copy_ex.py index 41604c462784..542a08ca1ba5 100755 --- a/build/android/gyp/copy_ex.py +++ b/build/android/gyp/copy_ex.py @@ -1,12 +1,11 @@ #!/usr/bin/env python3 # -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Copies files to a directory.""" -from __future__ import print_function import filecmp import itertools @@ -16,6 +15,7 @@ import sys from util import build_utils +import action_helpers # build_utils adds //build to sys.path. def _get_all_files(base): @@ -50,8 +50,9 @@ def CopyFile(f, dest, deps): def DoCopy(options, deps): """Copy files or directories given in options.files and update deps.""" - files = list(itertools.chain.from_iterable(build_utils.ParseGnList(f) - for f in options.files)) + files = list( + itertools.chain.from_iterable( + action_helpers.parse_gn_list(f) for f in options.files)) for f in files: if os.path.isdir(f) and not options.clear: @@ -62,13 +63,14 @@ def DoCopy(options, deps): def DoRenaming(options, deps): """Copy and rename files given in options.renaming_sources and update deps.""" - src_files = list(itertools.chain.from_iterable( - build_utils.ParseGnList(f) - for f in options.renaming_sources)) + src_files = list( + itertools.chain.from_iterable( + action_helpers.parse_gn_list(f) for f in options.renaming_sources)) - dest_files = list(itertools.chain.from_iterable( - build_utils.ParseGnList(f) - for f in options.renaming_destinations)) + dest_files = list( + itertools.chain.from_iterable( + action_helpers.parse_gn_list(f) + for f in options.renaming_destinations)) if (len(src_files) != len(dest_files)): print('Renaming source and destination files not match.') @@ -85,7 +87,7 @@ def main(args): args = build_utils.ExpandFileArgs(args) parser = optparse.OptionParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_option('--dest', help='Directory to copy files to.') parser.add_option('--files', action='append', @@ -119,7 +121,7 @@ def main(args): DoRenaming(options, deps) if options.depfile: - build_utils.WriteDepfile(options.depfile, options.stamp, deps) + action_helpers.write_depfile(options.depfile, options.stamp, deps) if options.stamp: build_utils.Touch(options.stamp) diff --git a/build/android/gyp/copy_ex.pydeps b/build/android/gyp/copy_ex.pydeps index 37352512be1c..5d75f9a3965b 100644 --- a/build/android/gyp/copy_ex.pydeps +++ b/build/android/gyp/copy_ex.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/copy_ex.pydeps build/android/gyp/copy_ex.py +../../action_helpers.py ../../gn_helpers.py copy_ex.py util/__init__.py diff --git 
a/build/android/gyp/create_apk_operations_script.py b/build/android/gyp/create_apk_operations_script.py index 660567f0deb2..1d1cb5d1ab72 100755 --- a/build/android/gyp/create_apk_operations_script.py +++ b/build/android/gyp/create_apk_operations_script.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -12,7 +12,7 @@ from util import build_utils SCRIPT_TEMPLATE = string.Template("""\ -#!/usr/bin/env python +#!/usr/bin/env python3 # # This file was generated by build/android/gyp/create_apk_operations_script.py @@ -26,21 +26,14 @@ def main(): sys.path.append(resolve(${APK_OPERATIONS_DIR})) import apk_operations output_dir = resolve(${OUTPUT_DIR}) - try: - apk_operations.Run( - output_dir, - resolve(${APK_PATH}), - [resolve(p) for p in ${ADDITIONAL_APK_PATHS}], - resolve(${INC_JSON_PATH}), - ${FLAGS_FILE}, - ${TARGET_CPU}, - resolve(${MAPPING_PATH})) - except TypeError: - rel_output_dir = os.path.relpath(output_dir) - rel_script_path = os.path.relpath(sys.argv[0], output_dir) - sys.stderr.write('Script out-of-date. Rebuild via:\\n') - sys.stderr.write(' ninja -C %s %s\\n' % (rel_output_dir, rel_script_path)) - return 1 + apk_operations.Run( + output_dir, + resolve(${APK_PATH}), + [resolve(p) for p in ${ADDITIONAL_APK_PATHS}], + resolve(${INC_JSON_PATH}), + ${FLAGS_FILE}, + ${TARGET_CPU}, + resolve(${MAPPING_PATH})) if __name__ == '__main__': diff --git a/build/android/gyp/create_app_bundle.py b/build/android/gyp/create_app_bundle.py index 0b44c163ed1a..128260868a16 100755 --- a/build/android/gyp/create_app_bundle.py +++ b/build/android/gyp/create_app_bundle.py @@ -1,28 +1,33 @@ #!/usr/bin/env python3 # -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Create an Android application bundle from one or more bundle modules.""" import argparse +import concurrent.futures import json +import logging import os +import posixpath import shutil import sys +from xml.etree import ElementTree import zipfile sys.path.append( os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))) from pylib.utils import dexdump +import bundletool from util import build_utils from util import manifest_utils from util import resource_utils -from xml.etree import ElementTree +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers -import bundletool # Location of language-based assets in bundle modules. 
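create_apk_operations_script.py above renders its wrapper through string.Template, whose ${...} placeholders stay inert to Python's own % and {} formatting. A minimal demonstration of the substitution step (values illustrative; the real script substitutes repr()-quoted paths):

import string

template = string.Template('#!/usr/bin/env python3\nRun(${OUTPUT_DIR})\n')
print(template.substitute(OUTPUT_DIR=repr('out/Debug')))
# Run('out/Debug')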
_LOCALES_SUBDIR = 'assets/locales/' @@ -60,6 +65,11 @@ 'xmf' ] +_COMPONENT_TYPES = ('activity', 'provider', 'receiver', 'service') +_DEDUPE_ENTRY_TYPES = _COMPONENT_TYPES + ('activity-alias', 'meta-data') + +_ROTATION_METADATA_KEY = 'com.google.play.apps.signing/RotationConfig.textproto' + def _ParseArgs(args): parser = argparse.ArgumentParser() @@ -88,6 +98,9 @@ def _ParseArgs(args): '--compress-shared-libraries', action='store_true', help='Whether to store native libraries compressed.') + parser.add_argument('--compress-dex', + action='store_true', + help='Compress .dex files') parser.add_argument('--split-dimensions', help="GN-list of split dimensions to support.") parser.add_argument( @@ -100,6 +113,8 @@ def _ParseArgs(args): 'listed there _and_ in --base-module-rtxt-path will ' 'be kept in the base bundle module, even if language' ' splitting is enabled.') + parser.add_argument('--rotation-config', + help='Path to a RotationConfig.textproto') parser.add_argument('--warnings-as-errors', action='store_true', help='Treat all warnings as errors.') @@ -110,30 +125,47 @@ def _ParseArgs(args): help='Check if services are in base module if isolatedSplits is enabled.') options = parser.parse_args(args) - options.module_zips = build_utils.ParseGnList(options.module_zips) - options.rtxt_in_paths = build_utils.ParseGnList(options.rtxt_in_paths) - options.pathmap_in_paths = build_utils.ParseGnList(options.pathmap_in_paths) + options.module_zips = action_helpers.parse_gn_list(options.module_zips) if len(options.module_zips) == 0: - raise Exception('The module zip list cannot be empty.') + parser.error('The module zip list cannot be empty.') + if len(options.module_zips) != len(options.module_names): + parser.error('# module zips != # names.') + if 'base' not in options.module_names: + parser.error('Missing base module.') + + # Sort modules for more stable outputs. + per_module_values = list( + zip(options.module_names, options.module_zips, + options.uncompressed_assets, options.rtxt_in_paths, + options.pathmap_in_paths)) + per_module_values.sort(key=lambda x: (x[0] != 'base', x[0])) + options.module_names = [x[0] for x in per_module_values] + options.module_zips = [x[1] for x in per_module_values] + options.uncompressed_assets = [x[2] for x in per_module_values] + options.rtxt_in_paths = [x[3] for x in per_module_values] + options.pathmap_in_paths = [x[4] for x in per_module_values] + + options.rtxt_in_paths = action_helpers.parse_gn_list(options.rtxt_in_paths) + options.pathmap_in_paths = action_helpers.parse_gn_list( + options.pathmap_in_paths) # Merge all uncompressed assets into a set. 
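The per-module sort in _ParseArgs above exists to make outputs byte-stable across builds; its tuple key orders 'base' first and the remaining modules alphabetically. A quick check of that behavior (module names illustrative):

names = ['vr', 'base', 'chrome', 'weblayer']
names.sort(key=lambda n: (n != 'base', n))
print(names)  # ['base', 'chrome', 'vr', 'weblayer']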
uncompressed_list = []
-  if options.uncompressed_assets:
-    for l in options.uncompressed_assets:
-      for entry in build_utils.ParseGnList(l):
-        # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
-        pos = entry.find(':')
-        if pos >= 0:
-          uncompressed_list.append(entry[pos + 1:])
-        else:
-          uncompressed_list.append(entry)
+  for entry in action_helpers.parse_gn_list(options.uncompressed_assets):
+    # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
+    pos = entry.find(':')
+    if pos >= 0:
+      uncompressed_list.append(entry[pos + 1:])
+    else:
+      uncompressed_list.append(entry)
   options.uncompressed_assets = set(uncompressed_list)
 
   # Check that all split dimensions are valid
   if options.split_dimensions:
-    options.split_dimensions = build_utils.ParseGnList(options.split_dimensions)
+    options.split_dimensions = action_helpers.parse_gn_list(
+        options.split_dimensions)
     for dim in options.split_dimensions:
       if dim.upper() not in _ALL_SPLIT_DIMENSIONS:
         parser.error('Invalid split dimension "%s" (expected one of: %s)' % (
@@ -162,13 +194,15 @@ def _MakeSplitDimension(value, enabled):
   return {'value': value, 'negate': not enabled}
 
 
-def _GenerateBundleConfigJson(uncompressed_assets, compress_shared_libraries,
-                              split_dimensions, base_master_resource_ids):
+def _GenerateBundleConfigJson(uncompressed_assets, compress_dex,
+                              compress_shared_libraries, split_dimensions,
+                              base_master_resource_ids):
   """Generate a dictionary that can be written to a JSON BuildConfig.
 
   Args:
     uncompressed_assets: A list or set of file paths under assets/ that should
      always be stored uncompressed.
+    compress_dex: Boolean, whether to compress .dex files.
     compress_shared_libraries: Boolean, whether to compress native libs.
     split_dimensions: list of split dimensions.
     base_master_resource_ids: Optional list of 32-bit resource IDs to keep
@@ -185,16 +219,22 @@ def _GenerateBundleConfigJson(uncompressed_assets, compress_shared_libraries,
   split_dimensions = [ _MakeSplitDimension(dim, dim in split_dimensions)
                        for dim in _ALL_SPLIT_DIMENSIONS ]
 
-  # Native libraries loaded by the crazy linker.
-  # Whether other .so files are compressed is controlled by
-  # "uncompressNativeLibraries".
-  uncompressed_globs = ['lib/*/crazy.*']
   # Locale-specific pak files stored in bundle splits need not be compressed.
+  uncompressed_globs = [
+      'assets/locales#lang_*/*.pak', 'assets/fallback-locales/*.pak'
+  ]
+  # normpath to allow for ../ prefix.
   uncompressed_globs.extend(
-      ['assets/locales#lang_*/*.pak', 'assets/fallback-locales/*.pak'])
-  uncompressed_globs.extend('assets/' + x for x in uncompressed_assets)
+      posixpath.normpath('assets/' + x) for x in uncompressed_assets)
   # NOTE: Use '**' instead of '*' to work through directories!
   uncompressed_globs.extend('**.' + ext for ext in _UNCOMPRESSED_FILE_EXTS)
+  if not compress_dex:
+    # Explicit glob required only when using bundletool to create .apks files.
+    # Play Store looks for and respects "uncompressDexFiles" set below.
+    # b/176198991
+    # This is added as a placeholder entry in order to have no effect unless
+    # processed with app_bundle_utils.GenerateBundleApks().
+ uncompressed_globs.append('classesX.dex') data = { 'optimizations': { @@ -298,11 +338,10 @@ def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions): if src_path in language_files: dst_path = _RewriteLanguageAssetPath(src_path) - build_utils.AddToZipHermetic( - dst_zip, - dst_path, - data=src_zip.read(src_path), - compress=is_compressed) + zip_helpers.add_to_zip_hermetic(dst_zip, + dst_path, + data=src_zip.read(src_path), + compress=is_compressed) return tmp_zip @@ -382,10 +421,14 @@ def _WriteBundlePathmap(module_pathmap_paths, module_names, def _GetManifestForModule(bundle_path, module_name): - return ElementTree.fromstring( - bundletool.RunBundleTool([ - 'dump', 'manifest', '--bundle', bundle_path, '--module', module_name - ])) + data = bundletool.RunBundleTool( + ['dump', 'manifest', '--bundle', bundle_path, '--module', module_name]) + try: + return ElementTree.fromstring(data) + except ElementTree.ParseError: + sys.stderr.write('Failed to parse:\n') + sys.stderr.write(data) + raise def _GetComponentNames(manifest, tag_name): @@ -393,77 +436,89 @@ def _GetComponentNames(manifest, tag_name): return [s.attrib.get(android_name) for s in manifest.iter(tag_name)] -def _MaybeCheckServicesAndProvidersPresentInBase(bundle_path, module_zips): - """Checks bundles with isolated splits define all services in the base module. - - Due to b/169196314, service classes are not found if they are not present in - the base module. Providers are also checked because they are loaded early in - startup, and keeping them in the base module gives more time for the chrome - split to load. - """ - base_manifest = _GetManifestForModule(bundle_path, 'base') - isolated_splits = base_manifest.get('{%s}isolatedSplits' % - manifest_utils.ANDROID_NAMESPACE) - if isolated_splits != 'true': - return - - # Collect service names from all split manifests. - base_zip = None - service_names = _GetComponentNames(base_manifest, 'service') - provider_names = _GetComponentNames(base_manifest, 'provider') - for module_zip in module_zips: - name = os.path.basename(module_zip)[:-len('.zip')] - if name == 'base': - base_zip = module_zip - else: - service_names.extend( - _GetComponentNames(_GetManifestForModule(bundle_path, name), - 'service')) - module_providers = _GetComponentNames( - _GetManifestForModule(bundle_path, name), 'provider') - if module_providers: - raise Exception("Providers should all be declared in the base manifest." - " '%s' module declared: %s" % (name, module_providers)) - - # Extract classes from the base module's dex. +def _ClassesFromZip(module_zip): classes = set() - base_package_name = manifest_utils.GetPackage(base_manifest) - for package in dexdump.Dump(base_zip): - for name, package_dict in package.items(): - if not name: - name = base_package_name - classes.update('%s.%s' % (name, c) - for c in package_dict['classes'].keys()) - - ignored_service_names = { - # Defined in the chime DFM manifest, but unused. - # org.chromium.chrome.browser.chime.ScheduledTaskService is used instead. - ("com.google.android.libraries.notifications.entrypoints.scheduled." - "ScheduledTaskService"), - - # Defined in the chime DFM manifest, only used pre-O (where isolated - # splits are not supported). - ("com.google.android.libraries.notifications.executor.impl.basic." - "ChimeExecutorApiService"), - } + for package in dexdump.Dump(module_zip): + for java_package, package_dict in package.items(): + java_package += '.' 
if java_package else ''
+    classes.update(java_package + c for c in package_dict['classes'])
+  return classes
+
+
+def _ValidateSplits(bundle_path, module_zips):
+  logging.info('Reading manifests and running dexdump')
+  base_zip = next(p for p in module_zips if os.path.basename(p) == 'base.zip')
+  module_names = sorted(os.path.basename(p)[:-len('.zip')] for p in module_zips)
+  # Using threads makes these steps go from 7s -> 1s on my machine.
+  with concurrent.futures.ThreadPoolExecutor() as executor:
+    # Create list of classes from the base module's dex.
+    classes_future = executor.submit(_ClassesFromZip, base_zip)
+
+    # Create xmltrees of all module manifests.
+    manifest_futures = [
+        executor.submit(_GetManifestForModule, bundle_path, n)
+        for n in module_names
+    ]
+    manifests_by_name = dict(
+        zip(module_names, (f.result() for f in manifest_futures)))
+    base_classes = classes_future.result()
-  # Ensure all services are present in base module.
-  for service_name in service_names:
-    if service_name not in classes:
-      if service_name in ignored_service_names:
-        continue
-      raise Exception("Service %s should be present in the base module's dex."
+  # Collect service names from all split manifests.
+  logging.info('Performing checks')
+  errors = []
+
+  # Ensure there are no components defined in multiple splits.
+  splits_by_component = {}
+  for module_name, cur_manifest in manifests_by_name.items():
+    for kind in _DEDUPE_ENTRY_TYPES:
+      for component in _GetComponentNames(cur_manifest, kind):
+        owner_module_name = splits_by_component.setdefault((kind, component),
+                                                           module_name)
+        # Allow services that exist only to keep <meta-data> out of
+        # ApplicationInfo.
+        if (owner_module_name != module_name
+            and not component.endswith('HolderService')):
+          errors.append(f'The {kind} "{component}" appeared in both '
+                        f'{owner_module_name} and {module_name}.')
+
+  # Ensure components defined in base manifest exist in base dex.
+  for (kind, component), module_name in splits_by_component.items():
+    if module_name == 'base' and kind in _COMPONENT_TYPES:
+      if component not in base_classes:
+        errors.append(f"{component} is defined in the base manifest, "
+                      f"but the class does not exist in the base split's dex")
+
+  # Remaining checks apply only when isolatedSplits="true".
+  isolated_splits = manifests_by_name['base'].get(
+      f'{{{manifest_utils.ANDROID_NAMESPACE}}}isolatedSplits')
+  if isolated_splits != 'true':
+    return errors
+
+  # Ensure all providers are present in base module. We enforce this because
+  # providers are loaded early in startup, and keeping them in the base module
+  # gives more time for the chrome split to load.
+  for module_name, cur_manifest in manifests_by_name.items():
+    if module_name == 'base':
+      continue
+    provider_names = _GetComponentNames(cur_manifest, 'provider')
+    if provider_names:
+      errors.append('Providers should all be declared in the base manifest.'
+                    ' "%s" module declared: %s' % (module_name, provider_names))
+
+  # Ensure all services are present in base module because service classes are
+  # not found if they are not present in the base module. b/169196314
+  # It is fine if they are defined in split manifests though.
+  for cur_manifest in manifests_by_name.values():
+    for service_name in _GetComponentNames(cur_manifest, 'service'):
+      if service_name not in base_classes:
+        errors.append("Service %s should be present in the base module's dex."
                       " See b/169196314 for more details." % service_name)
-  # Ensure all providers are present in base module.
- for provider_name in provider_names: - if provider_name not in classes: - raise Exception( - "Provider %s should be present in the base module's dex." % - provider_name) + return errors def main(args): + build_utils.InitLogging('AAB_DEBUG') args = build_utils.ExpandFileArgs(args) options = _ParseArgs(args) @@ -473,18 +528,23 @@ def main(args): with build_utils.TempDir() as tmp_dir: + logging.info('Splitting locale assets') module_zips = [ _SplitModuleForAssetTargeting(module, tmp_dir, split_dimensions) \ for module in options.module_zips] base_master_resource_ids = None if options.base_module_rtxt_path: + logging.info('Creating R.txt allowlist') base_master_resource_ids = _GenerateBaseResourcesAllowList( options.base_module_rtxt_path, options.base_allowlist_rtxt_path) - bundle_config = _GenerateBundleConfigJson( - options.uncompressed_assets, options.compress_shared_libraries, - split_dimensions, base_master_resource_ids) + logging.info('Creating BundleConfig.pb.json') + bundle_config = _GenerateBundleConfigJson(options.uncompressed_assets, + options.compress_dex, + options.compress_shared_libraries, + split_dimensions, + base_master_resource_ids) tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle') @@ -495,7 +555,8 @@ def main(args): with open(tmp_bundle_config, 'w') as f: f.write(bundle_config) - cmd_args = build_utils.JavaCmd(options.warnings_as_errors) + [ + logging.info('Running bundletool') + cmd_args = build_utils.JavaCmd() + [ '-jar', bundletool.BUNDLETOOL_JAR_PATH, 'build-bundle', @@ -504,6 +565,11 @@ def main(args): '--config=' + tmp_bundle_config, ] + if options.rotation_config: + cmd_args += [ + f'--metadata-file={_ROTATION_METADATA_KEY}:{options.rotation_config}' + ] + build_utils.CheckOutput( cmd_args, print_stdout=True, @@ -516,8 +582,15 @@ def main(args): # isolated splits disabled and 2s for bundles with isolated splits # enabled. Consider making this run in parallel or move into a separate # step before enabling isolated splits by default. 
- _MaybeCheckServicesAndProvidersPresentInBase(tmp_bundle, module_zips) - + logging.info('Validating isolated split manifests') + errors = _ValidateSplits(tmp_bundle, module_zips) + if errors: + sys.stderr.write('Bundle failed sanity checks:\n ') + sys.stderr.write('\n '.join(errors)) + sys.stderr.write('\n') + sys.exit(1) + + logging.info('Writing final output artifacts') shutil.move(tmp_bundle, options.out_bundle) if options.rtxt_out_path: diff --git a/build/android/gyp/create_app_bundle.pydeps b/build/android/gyp/create_app_bundle.pydeps index cbb471abca7d..5e7a79f6387f 100644 --- a/build/android/gyp/create_app_bundle.pydeps +++ b/build/android/gyp/create_app_bundle.pydeps @@ -13,9 +13,8 @@ ../../../third_party/catapult/devil/devil/utils/__init__.py ../../../third_party/catapult/devil/devil/utils/cmd_helper.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/asyncfilters.py -../../../third_party/jinja2/asyncsupport.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py @@ -35,7 +34,9 @@ ../../../third_party/markupsafe/__init__.py ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py ../pylib/__init__.py ../pylib/constants/__init__.py ../pylib/utils/__init__.py diff --git a/build/android/gyp/create_app_bundle_apks.py b/build/android/gyp/create_app_bundle_apks.py index 5950696b8c8a..2f0dc51d9500 100755 --- a/build/android/gyp/create_app_bundle_apks.py +++ b/build/android/gyp/create_app_bundle_apks.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
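The _ValidateSplits() rewrite above collects every failure into an errors list instead of raising on the first problem, and its first pass flags any component declared by more than one split. A minimal standalone sketch of that dedupe pass, using hypothetical module and component names rather than manifests parsed from a real bundle:

    # Hypothetical (kind, component) declarations per module; the real code
    # derives these from bundletool manifest dumps via _GetComponentNames().
    components_by_module = {
        'base': [('service', 'org.example.FooService')],
        'feature': [('service', 'org.example.FooService'),
                    ('provider', 'org.example.BarProvider')],
    }

    splits_by_component = {}
    errors = []
    for module_name, components in components_by_module.items():
      for kind, component in components:
        # The first module to declare a (kind, component) pair becomes its
        # owner; later declarations by other modules are duplicates.
        owner = splits_by_component.setdefault((kind, component), module_name)
        if owner != module_name and not component.endswith('HolderService'):
          errors.append(f'The {kind} "{component}" appeared in both '
                        f'{owner} and {module_name}.')

    print(errors)  # Reports FooService as declared in two splits.
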
@@ -32,18 +32,21 @@ def main(): '--minimal', action='store_true', help='Create APKs archive with minimal language support.') + parser.add_argument('--local-testing', + action='store_true', + help='Create APKs archive with local testing support.') args = parser.parse_args() - app_bundle_utils.GenerateBundleApks( - args.bundle, - args.output, - args.aapt2_path, - args.keystore_path, - args.keystore_password, - args.keystore_name, - minimal=args.minimal, - check_for_noop=False) + app_bundle_utils.GenerateBundleApks(args.bundle, + args.output, + args.aapt2_path, + args.keystore_path, + args.keystore_password, + args.keystore_name, + local_testing=args.local_testing, + minimal=args.minimal, + check_for_noop=False) if __name__ == '__main__': diff --git a/build/android/gyp/create_app_bundle_apks.pydeps b/build/android/gyp/create_app_bundle_apks.pydeps index 20d8ffe8f9f7..65810c3eb0c1 100644 --- a/build/android/gyp/create_app_bundle_apks.pydeps +++ b/build/android/gyp/create_app_bundle_apks.pydeps @@ -1,9 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle_apks.pydeps build/android/gyp/create_app_bundle_apks.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/asyncfilters.py -../../../third_party/jinja2/asyncsupport.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py @@ -23,6 +22,7 @@ ../../../third_party/markupsafe/__init__.py ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py +../../action_helpers.py ../../gn_helpers.py ../../print_python_deps.py ../pylib/__init__.py diff --git a/build/android/gyp/create_bundle_wrapper_script.py b/build/android/gyp/create_bundle_wrapper_script.py index 282e2069a2d3..a3870bf89512 100755 --- a/build/android/gyp/create_bundle_wrapper_script.py +++ b/build/android/gyp/create_bundle_wrapper_script.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -11,9 +11,10 @@ import sys from util import build_utils +import action_helpers # build_utils adds //build to sys.path. 
SCRIPT_TEMPLATE = string.Template("""\ -#!/usr/bin/env python +#!/usr/bin/env python3 # # This file was generated by build/android/gyp/create_bundle_wrapper_script.py @@ -109,7 +110,7 @@ def relativize(path): 'TARGET_CPU': repr(args.target_cpu), 'SYSTEM_IMAGE_LOCALES': - repr(build_utils.ParseGnList(args.system_image_locales)), + repr(action_helpers.parse_gn_list(args.system_image_locales)), 'DEFAULT_MODULES': repr(args.default_modules), } diff --git a/build/android/gyp/create_bundle_wrapper_script.pydeps b/build/android/gyp/create_bundle_wrapper_script.pydeps index 7758ed6272e8..51d912c837c3 100644 --- a/build/android/gyp/create_bundle_wrapper_script.pydeps +++ b/build/android/gyp/create_bundle_wrapper_script.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_bundle_wrapper_script.pydeps build/android/gyp/create_bundle_wrapper_script.py +../../action_helpers.py ../../gn_helpers.py create_bundle_wrapper_script.py util/__init__.py diff --git a/build/android/gyp/create_java_binary_script.py b/build/android/gyp/create_java_binary_script.py index 5bc9d08ab1f5..f9e665f4e3d0 100755 --- a/build/android/gyp/create_java_binary_script.py +++ b/build/android/gyp/create_java_binary_script.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -10,18 +10,19 @@ jar. This includes correctly setting the classpath and the main class. """ -import optparse +import argparse import os import sys from util import build_utils +import action_helpers # build_utils adds //build to sys.path. # The java command must be executed in the current directory because there may # be user-supplied paths in the args. The script receives the classpath relative # to the directory that the script is written in and then, when run, must # recalculate the paths relative to the current directory. 
script_template = """\ -#!/usr/bin/env python +#!/usr/bin/env python3 # # This file was generated by build/android/gyp/create_java_binary_script.py @@ -76,44 +77,60 @@ def main(argv): argv = build_utils.ExpandFileArgs(argv) - parser = optparse.OptionParser() - parser.add_option('--output', help='Output path for executable script.') - parser.add_option('--main-class', + parser = argparse.ArgumentParser() + parser.add_argument('--output', + required=True, + help='Output path for executable script.') + parser.add_argument( + '--main-class', + required=True, help='Name of the java class with the "main" entry point.') - parser.add_option('--classpath', action='append', default=[], - help='Classpath for running the jar.') - parser.add_option('--noverify', action='store_true', - help='JVM flag: noverify.') - parser.add_option('--tiered-stop-at-level-one', - action='store_true', - help='JVM flag: -XX:TieredStopAtLevel=1.') - - options, extra_program_args = parser.parse_args(argv) - - extra_flags = [] - if options.noverify: - extra_flags.append('java_cmd.append("-noverify")') - if options.tiered_stop_at_level_one: + parser.add_argument('--max-heap-size', + required=True, + help='Argument for -Xmx') + parser.add_argument('--classpath', + action='append', + default=[], + help='Classpath for running the jar.') + parser.add_argument('--tiered-stop-at-level-one', + action='store_true', + help='JVM flag: -XX:TieredStopAtLevel=1.') + parser.add_argument('--use-jdk-11', + action='store_true', + help='Use older JDK11 instead of modern JDK.') + parser.add_argument('extra_program_args', + nargs='*', + help='This captures all ' + 'args after "--" to pass as extra args to the java cmd.') + + args = parser.parse_args(argv) + + extra_flags = [f'java_cmd.append("-Xmx{args.max_heap_size}")'] + if args.tiered_stop_at_level_one: extra_flags.append('java_cmd.append("-XX:TieredStopAtLevel=1")') classpath = [] - for cp_arg in options.classpath: - classpath += build_utils.ParseGnList(cp_arg) + for cp_arg in args.classpath: + classpath += action_helpers.parse_gn_list(cp_arg) - run_dir = os.path.dirname(options.output) + run_dir = os.path.dirname(args.output) classpath = [os.path.relpath(p, run_dir) for p in classpath] - java_path = os.path.relpath( - os.path.join(build_utils.JAVA_HOME, 'bin', 'java'), run_dir) - with build_utils.AtomicOutput(options.output, mode='w') as script: + if args.use_jdk_11: + java_home = build_utils.JAVA_11_HOME_DEPRECATED + else: + java_home = build_utils.JAVA_HOME + java_path = os.path.relpath(os.path.join(java_home, 'bin', 'java'), run_dir) + + with action_helpers.atomic_output(args.output, mode='w') as script: script.write( script_template.format(classpath=('"%s"' % '", "'.join(classpath)), java_path=repr(java_path), - main_class=options.main_class, - extra_program_args=repr(extra_program_args), + main_class=args.main_class, + extra_program_args=repr(args.extra_program_args), extra_flags='\n'.join(extra_flags))) - os.chmod(options.output, 0o750) + os.chmod(args.output, 0o750) if __name__ == '__main__': diff --git a/build/android/gyp/create_java_binary_script.pydeps b/build/android/gyp/create_java_binary_script.pydeps index 6bc21fa7e206..a0a740dec9f1 100644 --- a/build/android/gyp/create_java_binary_script.pydeps +++ b/build/android/gyp/create_java_binary_script.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_java_binary_script.pydeps build/android/gyp/create_java_binary_script.py +../../action_helpers.py 
../../gn_helpers.py create_java_binary_script.py util/__init__.py diff --git a/build/android/gyp/create_r_java.py b/build/android/gyp/create_r_java.py index 97e512d2f88e..b662a39695fb 100755 --- a/build/android/gyp/create_r_java.py +++ b/build/android/gyp/create_r_java.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Writes a dummy R.java file from a list of R.txt files.""" @@ -9,6 +9,8 @@ from util import build_utils from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers def _ConcatRTxts(rtxt_in_paths, combined_out_path): @@ -34,12 +36,13 @@ def _CreateRJava(rtxts, package_name, srcjar_out): rjava_build_options=rjava_build_options, srcjar_out=srcjar_out, ignore_mismatched_values=True) - build_utils.ZipDir(srcjar_out, build.srcjar_dir) + with action_helpers.atomic_output(srcjar_out) as f: + zip_helpers.zip_directory(f, build.srcjar_dir) def main(args): parser = argparse.ArgumentParser(description='Create an R.java srcjar.') - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_argument('--srcjar-out', required=True, help='Path to output srcjar.') @@ -50,12 +53,12 @@ def main(args): required=True, help='R.java package to use.') options = parser.parse_args(build_utils.ExpandFileArgs(args)) - options.deps_rtxts = build_utils.ParseGnList(options.deps_rtxts) + options.deps_rtxts = action_helpers.parse_gn_list(options.deps_rtxts) _CreateRJava(options.deps_rtxts, options.r_package, options.srcjar_out) - build_utils.WriteDepfile(options.depfile, - options.srcjar_out, - inputs=options.deps_rtxts) + action_helpers.write_depfile(options.depfile, + options.srcjar_out, + inputs=options.deps_rtxts) if __name__ == "__main__": diff --git a/build/android/gyp/create_r_java.pydeps b/build/android/gyp/create_r_java.pydeps index 45121e3f7cc2..20fd1f8bd4af 100644 --- a/build/android/gyp/create_r_java.pydeps +++ b/build/android/gyp/create_r_java.pydeps @@ -1,9 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_java.pydeps build/android/gyp/create_r_java.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/asyncfilters.py -../../../third_party/jinja2/asyncsupport.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py @@ -23,7 +22,9 @@ ../../../third_party/markupsafe/__init__.py ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py create_r_java.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/create_r_txt.py b/build/android/gyp/create_r_txt.py index 2adde5dfb9b2..429f62f06f42 100755 --- a/build/android/gyp/create_r_txt.py +++ b/build/android/gyp/create_r_txt.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
"""Writes a dummy R.txt file from a resource zip.""" diff --git a/build/android/gyp/create_r_txt.pydeps b/build/android/gyp/create_r_txt.pydeps index c7698eefaa60..65378f038aa2 100644 --- a/build/android/gyp/create_r_txt.pydeps +++ b/build/android/gyp/create_r_txt.pydeps @@ -1,9 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_txt.pydeps build/android/gyp/create_r_txt.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/asyncfilters.py -../../../third_party/jinja2/asyncsupport.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py @@ -23,6 +22,7 @@ ../../../third_party/markupsafe/__init__.py ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py +../../action_helpers.py ../../gn_helpers.py create_r_txt.py util/__init__.py diff --git a/build/android/gyp/create_size_info_files.py b/build/android/gyp/create_size_info_files.py index c60b02d7c8e4..24fcf8dc8bc0 100755 --- a/build/android/gyp/create_size_info_files.py +++ b/build/android/gyp/create_size_info_files.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -15,6 +15,7 @@ from util import build_utils from util import jar_info_utils +import action_helpers # build_utils adds //build to sys.path. _AAR_VERSION_PATTERN = re.compile(r'/[^/]*?(\.aar/|\.jar/)') @@ -40,9 +41,7 @@ def _TransformAarPaths(path): def _MergeResInfoFiles(res_info_path, info_paths): # Concatenate them all. - # only_if_changed=False since no build rules depend on this as an input. - with build_utils.AtomicOutput(res_info_path, only_if_changed=False, - mode='w+') as dst: + with action_helpers.atomic_output(res_info_path, 'w+') as dst: for p in info_paths: with open(p) as src: dst.writelines(_TransformAarPaths(l) for l in src) @@ -58,8 +57,9 @@ def _MergePakInfoFiles(merged_path, pak_infos): with open(pak_info_path, 'r') as src_info_file: info_lines.update(_TransformAarPaths(x) for x in src_info_file) # only_if_changed=False since no build rules depend on this as an input. - with build_utils.AtomicOutput(merged_path, only_if_changed=False, - mode='w+') as f: + with action_helpers.atomic_output(merged_path, + only_if_changed=False, + mode='w+') as f: f.writelines(sorted(info_lines)) @@ -121,7 +121,7 @@ def _MergeJarInfoFiles(output, inputs): attributed_path, name)) # only_if_changed=False since no build rules depend on this as an input. 
-  with build_utils.AtomicOutput(output, only_if_changed=False) as f:
+  with action_helpers.atomic_output(output, only_if_changed=False) as f:
     jar_info_utils.WriteJarInfoFile(f, info_data)
 
 
@@ -139,7 +139,7 @@ def _FindJarInputs(jar_paths):
 def main(args):
   args = build_utils.ExpandFileArgs(args)
   parser = argparse.ArgumentParser(description=__doc__)
-  build_utils.AddDepfileOption(parser)
+  action_helpers.add_depfile_arg(parser)
   parser.add_argument(
       '--jar-info-path', required=True, help='Output .jar.info file')
   parser.add_argument(
@@ -170,9 +170,9 @@ def main(args):
 
   options = parser.parse_args(args)
 
-  options.jar_files = build_utils.ParseGnList(options.jar_files)
-  options.assets = build_utils.ParseGnList(options.assets)
-  options.uncompressed_assets = build_utils.ParseGnList(
+  options.jar_files = action_helpers.parse_gn_list(options.jar_files)
+  options.assets = action_helpers.parse_gn_list(options.assets)
+  options.uncompressed_assets = action_helpers.parse_gn_list(
       options.uncompressed_assets)
 
   jar_inputs = _FindJarInputs(_RemoveDuplicatesFromList(options.jar_files))
@@ -186,9 +186,9 @@ def main(args):
     _MergeResInfoFiles(options.res_info_path, res_inputs)
 
   all_inputs = jar_inputs + pak_inputs + res_inputs
-  build_utils.WriteDepfile(options.depfile,
-                           options.jar_info_path,
-                           inputs=all_inputs)
+  action_helpers.write_depfile(options.depfile,
+                               options.jar_info_path,
+                               inputs=all_inputs)
 
 
 if __name__ == '__main__':
diff --git a/build/android/gyp/create_size_info_files.pydeps b/build/android/gyp/create_size_info_files.pydeps
index 1a69c553d761..0dd61cbb35d7 100644
--- a/build/android/gyp/create_size_info_files.pydeps
+++ b/build/android/gyp/create_size_info_files.pydeps
@@ -1,5 +1,6 @@
 # Generated by running:
 # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_size_info_files.pydeps build/android/gyp/create_size_info_files.py
+../../action_helpers.py
 ../../gn_helpers.py
 create_size_info_files.py
 util/__init__.py
diff --git a/build/android/gyp/create_stub_manifest.py b/build/android/gyp/create_stub_manifest.py
new file mode 100755
index 000000000000..889fa26bf8d3
--- /dev/null
+++ b/build/android/gyp/create_stub_manifest.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python3
+
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generates AndroidManifest.xml for a -Stub.apk."""
+
+import argparse
+import pathlib
+
+_MAIN_TEMPLATE = """\
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android">
+    <application>{}</application>
+</manifest>
+"""
+
+_STATIC_LIBRARY_TEMPLATE = """
+    <static-library android:name="{}" android:version="{}" />
+"""
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--static-library-name')
+  parser.add_argument('--static-library-version')
+  parser.add_argument('--output', required=True)
+  args = parser.parse_args()
+
+  static_library_part = ''
+  if args.static_library_name:
+    static_library_part = _STATIC_LIBRARY_TEMPLATE.format(
+        args.static_library_name, args.static_library_version)
+
+  data = _MAIN_TEMPLATE.format(static_library_part)
+  pathlib.Path(args.output).write_text(data, encoding='utf8')
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/create_test_apk_wrapper_script.py b/build/android/gyp/create_test_apk_wrapper_script.py
new file mode 100755
index 000000000000..1e6374872432
--- /dev/null
+++ b/build/android/gyp/create_test_apk_wrapper_script.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Create a wrapper script to run a test apk using apk_operations.py.""" + +import argparse +import os +import string +import sys + +from util import build_utils + +SCRIPT_TEMPLATE = string.Template("""\ +#!/usr/bin/env python3 +# +# This file was generated by build/android/gyp/create_test_apk_wrapper_script.py + +import os +import sys + +def main(): + script_directory = os.path.dirname(__file__) + resolve = lambda p: p if p is None else os.path.abspath(os.path.join( + script_directory, p)) + sys.path.append(resolve(${WRAPPED_SCRIPT_DIR})) + import apk_operations + + additional_apk_paths = [resolve(p) for p in ${ADDITIONAL_APKS}] + apk_operations.RunForTestApk( + output_directory=resolve(${OUTPUT_DIR}), + package_name=${PACKAGE_NAME}, + test_apk_path=resolve(${TEST_APK}), + test_apk_json=resolve(${TEST_APK_JSON}), + proguard_mapping_path=resolve(${MAPPING_PATH}), + additional_apk_paths=additional_apk_paths) + +if __name__ == '__main__': + sys.exit(main()) +""") + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser() + parser.add_argument('--script-output-path', + required=True, + help='Output path for executable script.') + parser.add_argument('--package-name', required=True) + parser.add_argument('--test-apk') + parser.add_argument('--test-apk-incremental-install-json') + parser.add_argument('--proguard-mapping-path') + parser.add_argument('--additional-apk', + action='append', + dest='additional_apks', + default=[], + help='Paths to APKs to be installed prior to --apk-path.') + args = parser.parse_args(args) + + def relativize(path): + """Returns the path relative to the output script directory.""" + if path is None: + return path + return os.path.relpath(path, os.path.dirname(args.script_output_path)) + + wrapped_script_dir = os.path.join(os.path.dirname(__file__), os.path.pardir) + wrapped_script_dir = relativize(wrapped_script_dir) + with open(args.script_output_path, 'w') as script: + script_dict = { + 'WRAPPED_SCRIPT_DIR': repr(wrapped_script_dir), + 'OUTPUT_DIR': repr(relativize('.')), + 'PACKAGE_NAME': repr(args.package_name), + 'TEST_APK': repr(relativize(args.test_apk)), + 'TEST_APK_JSON': + repr(relativize(args.test_apk_incremental_install_json)), + 'MAPPING_PATH': repr(relativize(args.proguard_mapping_path)), + 'ADDITIONAL_APKS': [relativize(p) for p in args.additional_apks], + } + script.write(SCRIPT_TEMPLATE.substitute(script_dict)) + os.chmod(args.script_output_path, 0o750) + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/jetify_jar.pydeps b/build/android/gyp/create_test_apk_wrapper_script.pydeps similarity index 50% rename from build/android/gyp/jetify_jar.pydeps rename to build/android/gyp/create_test_apk_wrapper_script.pydeps index 6a1a589a7d05..d52f3438fdf9 100644 --- a/build/android/gyp/jetify_jar.pydeps +++ b/build/android/gyp/create_test_apk_wrapper_script.pydeps @@ -1,6 +1,6 @@ # Generated by running: -# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jetify_jar.pydeps build/android/gyp/jetify_jar.py +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_test_apk_wrapper_script.pydeps build/android/gyp/create_test_apk_wrapper_script.py ../../gn_helpers.py -jetify_jar.py +create_test_apk_wrapper_script.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/create_ui_locale_resources.py b/build/android/gyp/create_ui_locale_resources.py index 772dab770972..c767bc50121a 100755 --- 
a/build/android/gyp/create_ui_locale_resources.py +++ b/build/android/gyp/create_ui_locale_resources.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -27,6 +27,9 @@ from util import build_utils from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + # A small string template for the content of each strings.xml file. # NOTE: The name is chosen to avoid any conflicts with other string defined @@ -52,8 +55,10 @@ def _AddLocaleResourceFileToZip(out_zip, android_locale, locale): zip_path = 'values-%s/strings.xml' % android_locale else: zip_path = 'values/strings.xml' - build_utils.AddToZipHermetic( - out_zip, zip_path, data=locale_data, compress=False) + zip_helpers.add_to_zip_hermetic(out_zip, + zip_path, + data=locale_data, + compress=False) def main(): @@ -69,11 +74,11 @@ def main(): args = parser.parse_args() - locale_list = build_utils.ParseGnList(args.locale_list) + locale_list = action_helpers.parse_gn_list(args.locale_list) if not locale_list: raise Exception('Locale list cannot be empty!') - with build_utils.AtomicOutput(args.output_zip) as tmp_file: + with action_helpers.atomic_output(args.output_zip) as tmp_file: with zipfile.ZipFile(tmp_file, 'w') as out_zip: # First, write the default value, since aapt requires one. _AddLocaleResourceFileToZip(out_zip, '', _DEFAULT_CHROME_LOCALE) diff --git a/build/android/gyp/create_ui_locale_resources.pydeps b/build/android/gyp/create_ui_locale_resources.pydeps index 6bb98dd2f204..5cffc7906aba 100644 --- a/build/android/gyp/create_ui_locale_resources.pydeps +++ b/build/android/gyp/create_ui_locale_resources.pydeps @@ -1,9 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/asyncfilters.py -../../../third_party/jinja2/asyncsupport.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py @@ -23,7 +22,9 @@ ../../../third_party/markupsafe/__init__.py ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py create_ui_locale_resources.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/create_unwind_table.py b/build/android/gyp/create_unwind_table.py new file mode 100755 index 000000000000..83cd73d6546d --- /dev/null +++ b/build/android/gyp/create_unwind_table.py @@ -0,0 +1,1095 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Creates a table of unwind information in Android Chrome's bespoke format.""" + +import abc +import argparse +import collections +import enum +import json +import logging +import re +import struct +import subprocess +import sys +from typing import (Dict, Iterable, List, NamedTuple, Sequence, TextIO, Tuple, + Union) + +from util import build_utils + +_STACK_CFI_INIT_REGEX = re.compile( + r'^STACK CFI INIT ([0-9a-f]+) ([0-9a-f]+) (.+)$') +_STACK_CFI_REGEX = re.compile(r'^STACK CFI ([0-9a-f]+) (.+)$') + + +class AddressCfi(NamedTuple): + """Record representing CFI for an address within a function. + + Represents the Call Frame Information required to unwind from an address in a + function. + + Attributes: + address: The address. + unwind_instructions: The unwind instructions for the address. + + """ + address: int + unwind_instructions: str + + +class FunctionCfi(NamedTuple): + """Record representing CFI for a function. + + Note: address_cfi[0].address is the start address of the function. + + Attributes: + size: The function size in bytes. + address_cfi: The CFI at each address in the function. + + """ + size: int + address_cfi: Tuple[AddressCfi, ...] + + +def FilterToNonTombstoneCfi(stream: TextIO) -> Iterable[str]: + """Generates non-tombstone STACK CFI lines from the stream. + + STACK CFI functions with address 0 correspond are a 'tombstone' record + associated with dead code and can be ignored. See + https://bugs.llvm.org/show_bug.cgi?id=47148#c2. + + Args: + stream: A file object. + + Returns: + An iterable over the non-tombstone STACK CFI lines in the stream. + """ + in_tombstone_function = False + for line in stream: + if not line.startswith('STACK CFI '): + continue + + if line.startswith('STACK CFI INIT 0 '): + in_tombstone_function = True + elif line.startswith('STACK CFI INIT '): + in_tombstone_function = False + + if not in_tombstone_function: + yield line + + +def ReadFunctionCfi(stream: TextIO) -> Iterable[FunctionCfi]: + """Generates FunctionCfi records from the stream. + + Args: + stream: A file object. + + Returns: + An iterable over FunctionCfi corresponding to the non-tombstone STACK CFI + lines in the stream. + """ + current_function_address = None + current_function_size = None + current_function_address_cfi = [] + for line in FilterToNonTombstoneCfi(stream): + cfi_init_match = _STACK_CFI_INIT_REGEX.search(line) + if cfi_init_match: + # Function CFI with address 0 are tombstone entries per + # https://bugs.llvm.org/show_bug.cgi?id=47148#c2 and should have been + # filtered in `FilterToNonTombstoneCfi`. + assert current_function_address != 0 + if (current_function_address is not None + and current_function_size is not None): + yield FunctionCfi(current_function_size, + tuple(current_function_address_cfi)) + current_function_address = int(cfi_init_match.group(1), 16) + current_function_size = int(cfi_init_match.group(2), 16) + current_function_address_cfi = [ + AddressCfi(int(cfi_init_match.group(1), 16), cfi_init_match.group(3)) + ] + else: + cfi_match = _STACK_CFI_REGEX.search(line) + assert cfi_match + current_function_address_cfi.append( + AddressCfi(int(cfi_match.group(1), 16), cfi_match.group(2))) + + assert current_function_address is not None + assert current_function_size is not None + yield FunctionCfi(current_function_size, tuple(current_function_address_cfi)) + + +def EncodeAsBytes(*values: int) -> bytes: + """Encodes the argument ints as bytes. + + This function validates that the inputs are within the range that can be + represented as bytes. 
+
+  Args:
+    values: Integers in range [0, 255].
+
+  Returns:
+    The values encoded as bytes.
+  """
+  for i, value in enumerate(values):
+    if not 0 <= value <= 255:
+      raise ValueError('value = %d out of bounds at byte %d' % (value, i))
+  return bytes(values)
+
+
+def Uleb128Encode(value: int) -> bytes:
+  """Encodes the argument int to ULEB128 format.
+
+  Args:
+    value: Unsigned integer.
+
+  Returns:
+    The values encoded as ULEB128 bytes.
+  """
+  if value < 0:
+    raise ValueError(f'Cannot uleb128 encode negative value ({value}).')
+
+  uleb128_bytes = []
+  done = False
+  while not done:
+    value, lowest_seven_bits = divmod(value, 0x80)
+    done = value == 0
+    uleb128_bytes.append(lowest_seven_bits | (0x80 if not done else 0x00))
+  return EncodeAsBytes(*uleb128_bytes)
+
+
+def EncodeStackPointerUpdate(offset: int) -> bytes:
+  """Encodes a stack pointer update as arm unwind instructions.
+
+  Args:
+    offset: Offset to apply to the stack pointer. Should be in range [-0x204, inf).
+
+  Returns:
+    A list of arm unwind instructions as bytes.
+  """
+  assert offset % 4 == 0
+
+  abs_offset = abs(offset)
+  instruction_code = 0b01000000 if offset < 0 else 0b00000000
+  if 0x04 <= abs_offset <= 0x200:
+    instructions = [
+        # vsp = vsp + (xxxxxx << 2) + 4. Covers range 0x04-0x100 inclusive.
+        instruction_code | ((min(abs_offset, 0x100) - 4) >> 2)
+    ]
+    # For vsp increments of 0x104-0x200 we use 00xxxxxx twice.
+    if abs_offset >= 0x104:
+      instructions.append(instruction_code | ((abs_offset - 0x100 - 4) >> 2))
+    try:
+      return EncodeAsBytes(*instructions)
+    except ValueError as e:
+      raise RuntimeError('offset = %d produced out of range value' %
+                         offset) from e
+  else:
+    # This only encodes positive sp movement.
+    assert offset > 0, offset
+    return EncodeAsBytes(0b10110010  # vsp = vsp + 0x204 + (uleb128 << 2)
+                         ) + Uleb128Encode((offset - 0x204) >> 2)
+
+
+def EncodePop(registers: Sequence[int]) -> bytes:
+  """Encodes popping of a sequence of registers as arm unwind instructions.
+
+  Args:
+    registers: Collection of target registers to accept values popped from
+      stack. Register value order in the sequence does not matter. Values are
+      popped based on register index order.
+
+  Returns:
+    A list of arm unwind instructions as bytes.
+  """
+  assert all(
+      r in range(4, 16)
+      for r in registers), f'Can only pop r4 ~ r15. Registers:\n{registers}.'
+  assert len(registers) > 0, 'Register sequence cannot be empty.'
+
+  instructions: List[int] = []
+
+  # Check if the pushed registers are a continuous set starting from r4 (and
+  # ending prior to r12). This scenario has its own encoding.
+  pop_lr = 14 in registers
+  non_lr_registers = [r for r in registers if r != 14]
+  non_lr_registers_continuous_from_r4 = \
+      sorted(non_lr_registers) == list(range(4, 4 + len(non_lr_registers)))
+
+  if (pop_lr and 0 < len(non_lr_registers) <= 8
+      and non_lr_registers_continuous_from_r4):
+    instructions.append(0b10101000
+                        | (len(non_lr_registers) - 1)  # Pop r4-r[4+nnn], r14.
+                        )
+  else:
+    register_bits = 0
+    for register in registers:
+      register_bits |= 1 << register
+    register_bits = register_bits >> 4  # Skip r0 ~ r3.
+    instructions.extend([
+        # Pop up to 12 integer registers under masks {r15-r12}, {r11-r4}.
+        0b10000000 | (register_bits >> 8),
+        register_bits & 0xff
+    ])
+
+  return EncodeAsBytes(*instructions)
+
+
+class UnwindType(enum.Enum):
+  """
+  The type of unwind action to perform.
+  """
+
+  # Use lr as the return address.
+  RETURN_TO_LR = 1
+
+  # Increment or decrement the stack pointer and/or pop registers (r4 ~ r15).
+  # If both, the increment/decrement occurs first.
+  UPDATE_SP_AND_OR_POP_REGISTERS = 2
+
+  # Restore the stack pointer from a register then increment/decrement the stack
+  # pointer.
+  RESTORE_SP_FROM_REGISTER = 3
+
+  # No action necessary. Used for floating point register pops.
+  NO_ACTION = 4
+
+
+class AddressUnwind(NamedTuple):
+  """Record representing unwind information for an address within a function.
+
+  Attributes:
+    address_offset: The offset of the address from the start of the function.
+    unwind_type: The type of unwind to perform from the address.
+    sp_offset: The offset to apply to the stack pointer.
+    registers: The registers involved in the unwind.
+  """
+  address_offset: int
+  unwind_type: UnwindType
+  sp_offset: int
+  registers: Tuple[int, ...]
+
+
+class FunctionUnwind(NamedTuple):
+  """Record representing unwind information for a function.
+
+  Attributes:
+    address: The address of the function.
+    size: The function size in bytes.
+    address_unwinds: The unwind info at each address in the function.
+  """
+
+  address: int
+  size: int
+  address_unwinds: Tuple[AddressUnwind, ...]
+
+
+def EncodeAddressUnwind(address_unwind: AddressUnwind) -> bytes:
+  """Encodes an `AddressUnwind` object as arm unwind instructions.
+
+  Args:
+    address_unwind: Record representing unwind information for an address within
+      a function.
+
+  Returns:
+    A list of arm unwind instructions as bytes.
+  """
+  if address_unwind.unwind_type == UnwindType.RETURN_TO_LR:
+    return EncodeAsBytes(0b10110000)  # Finish.
+  if address_unwind.unwind_type == UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS:
+    return ((EncodeStackPointerUpdate(address_unwind.sp_offset)
+             if address_unwind.sp_offset else b'') +
+            (EncodePop(address_unwind.registers)
+             if address_unwind.registers else b''))
+
+  if address_unwind.unwind_type == UnwindType.RESTORE_SP_FROM_REGISTER:
+    assert len(address_unwind.registers) == 1
+    return (EncodeAsBytes(0b10010000
+                          | address_unwind.registers[0]  # Set vsp = r[nnnn].
+                          ) +
+            (EncodeStackPointerUpdate(address_unwind.sp_offset)
+             if address_unwind.sp_offset else b''))
+
+  if address_unwind.unwind_type == UnwindType.NO_ACTION:
+    return b''
+
+  assert False, 'unknown unwind type'
+  return b''
+
+
+class UnwindInstructionsParser(abc.ABC):
+  """Base class for parsers of breakpad unwind instruction sequences.
+
+  Provides regexes matching breakpad instruction sequences understood by the
+  parser, and parsing of the sequences from the regex match.
+  """
+
+  @abc.abstractmethod
+  def GetBreakpadInstructionsRegex(self) -> re.Pattern:
+    pass
+
+  @abc.abstractmethod
+  def ParseFromMatch(self, address_offset: int, cfa_sp_offset: int,
+                     match: re.Match) -> Tuple[AddressUnwind, int]:
+    """Parses the unwind info for the address from the regex match.
+
+    Args:
+      address_offset: Offset from function start address.
+      cfa_sp_offset: CFA stack pointer offset.
+      match: The regex match of the breakpad instructions.
+
+    Returns:
+      The unwind info for the address plus the new cfa_sp_offset.
+ """ + + +class NullParser(UnwindInstructionsParser): + """Translates the state before any instruction has been executed.""" + + regex = re.compile(r'^\.cfa: sp 0 \+ \.ra: lr$') + + def GetBreakpadInstructionsRegex(self) -> re.Pattern: + return self.regex + + def ParseFromMatch(self, address_offset: int, cfa_sp_offset: int, + match: re.Match) -> Tuple[AddressUnwind, int]: + return AddressUnwind(address_offset, UnwindType.RETURN_TO_LR, 0, ()), 0 + + +class PushOrSubSpParser(UnwindInstructionsParser): + """Translates unwinds from push or sub sp, #constant instructions.""" + + # We expect at least one of the three outer groups to be non-empty. Cases: + # + # Standard prologue pushes. + # Match the first two and optionally the third. + # + # Standard prologue sub sp, #constant. + # Match only the first. + # + # Pushes in dynamic stack allocation functions after saving sp. + # Match only the third since they don't alter the stack pointer or store the + # return address. + # + # Leaf functions that use callee-save registers. + # Match the first and third but not the second. + regex = re.compile(r'^(?:\.cfa: sp (\d+) \+ ?)?' + r'(?:\.ra: \.cfa (-\d+) \+ \^ ?)?' + r'((?:r\d+: \.cfa -\d+ \+ \^ ?)*)$') + + # 'r' followed by digits, with 'r' matched via positive lookbehind so only the + # number appears in the match. + register_regex = re.compile('(?<=r)(\d+)') + + def GetBreakpadInstructionsRegex(self) -> re.Pattern: + return self.regex + + def ParseFromMatch(self, address_offset: int, cfa_sp_offset: int, + match: re.Match) -> Tuple[AddressUnwind, int]: + # The group will be None if the outer non-capturing groups for the(\d+) and + # (-\d+) expressions are not matched. + new_cfa_sp_offset, ra_cfa_offset = (int(group) if group else None + for group in match.groups()[:2]) + + # Registers are pushed in reverse order by register number so are popped in + # order. Sort them to ensure the proper order. + registers = sorted([ + int(register) + for register in self.register_regex.findall(match.group(3)) + # `UpdateSpAndOrPopRegisters` only supports popping of register + # r4 ~ r15. The ignored registers are translated to sp increments by + # the following calculation on `sp_offset`. + if int(register) in range(4, 16) + ] + + # Also pop lr (ra in breakpad terms) if it was stored. + ([14] if ra_cfa_offset is not None else [])) + + sp_offset = 0 + if new_cfa_sp_offset is not None: + sp_offset = new_cfa_sp_offset - cfa_sp_offset + assert sp_offset % 4 == 0 + if sp_offset >= len(registers) * 4: + # Handles the sub sp, #constant case, and push instructions that push + # caller-save registers r0-r3 which don't get encoded in the unwind + # instructions. In the latter case we need to move the stack pointer up + # to the first pushed register. + sp_offset -= len(registers) * 4 + + return AddressUnwind(address_offset, + UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, sp_offset, + tuple(registers)), new_cfa_sp_offset or cfa_sp_offset + + +class VPushParser(UnwindInstructionsParser): + # VPushes that occur in dynamic stack allocation functions after storing the + # stack pointer don't change the stack pointer or push any register that we + # care about. The first group will not match in those cases. + # + # Breakpad doesn't seem to understand how to name the floating point + # registers so calls them unnamed_register. + regex = re.compile(r'^(?:\.cfa: sp (\d+) \+ )?' 
+                     r'(?:unnamed_register\d+: \.cfa -\d+ \+ \^ ?)+$')
+
+  def GetBreakpadInstructionsRegex(self) -> re.Pattern:
+    return self.regex
+
+  def ParseFromMatch(self, address_offset: int, cfa_sp_offset: int,
+                     match: re.Match) -> Tuple[AddressUnwind, int]:
+    # `match.group(1)`, which corresponds to the (\d+) expression, will be None
+    # if the first outer non-capturing group is not matched.
+    new_cfa_sp_offset = int(match.group(1)) if match.group(1) else None
+    if new_cfa_sp_offset is None:
+      return (AddressUnwind(address_offset, UnwindType.NO_ACTION, 0,
+                            ()), cfa_sp_offset)
+
+    sp_offset = new_cfa_sp_offset - cfa_sp_offset
+    assert sp_offset % 4 == 0
+    return AddressUnwind(address_offset,
+                         UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, sp_offset,
+                         ()), new_cfa_sp_offset
+
+
+class StoreSpParser(UnwindInstructionsParser):
+  regex = re.compile(r'^\.cfa: r(\d+) (\d+) \+$')
+
+  def GetBreakpadInstructionsRegex(self) -> re.Pattern:
+    return self.regex
+
+  def ParseFromMatch(self, address_offset: int, cfa_sp_offset: int,
+                     match: re.Match) -> Tuple[AddressUnwind, int]:
+    register = int(match.group(1))
+    new_cfa_sp_offset = int(match.group(2))
+    sp_offset = new_cfa_sp_offset - cfa_sp_offset
+    assert sp_offset % 4 == 0
+    return AddressUnwind(address_offset, UnwindType.RESTORE_SP_FROM_REGISTER,
+                         sp_offset, (register, )), new_cfa_sp_offset
+
+
+def EncodeUnwindInstructionTable(complete_instruction_sequences: Iterable[bytes]
+                                 ) -> Tuple[bytes, Dict[bytes, int]]:
+  """Encodes the unwind instruction table.
+
+  Deduplicates the encoded unwind instruction sequences. Generates the table and
+  a dictionary mapping each instruction sequence to its starting index in the
+  table.
+
+  The instruction table is used by the unwinder to provide the sequence of
+  unwind instructions to execute for each function, separated by offset
+  into the function.
+
+  Args:
+    complete_instruction_sequences: An iterable of encoded unwind instruction
+      sequences. The sequences represent the series of unwind instructions to
+      execute corresponding to offsets within each function.
+
+  Returns:
+    A tuple containing:
+    - The unwind instruction table as bytes.
+    - The mapping from the instruction sequence to the offset in the unwind
+      instruction table. This mapping is used to construct the function offset
+      table, which references entries in the unwind instruction table.
+  """
+  # As the function offset table uses variable length number encoding (uleb128),
+  # which means smaller numbers use fewer bytes to represent, we should sort
+  # the unwind instruction table by number of references from the function
+  # offset table in order to minimize the size of the function offset table.
+  ref_counts: Dict[bytes, int] = collections.defaultdict(int)
+  for sequence in complete_instruction_sequences:
+    ref_counts[sequence] += 1
+
+  def ComputeScore(sequence):
+    """ Score for each sequence is computed as ref_count / size_of_sequence.
+
+    According to the greedy algorithm, items with higher value / space cost
+    ratio should be prioritized. Here value is bytes saved in the function
+    offset table, represented by ref_count. Space cost is the space taken in
+    the unwind instruction table, represented by size_of_sequence.
+
+    Note: In order to ensure build-time determinism, `sequence` is also returned
+    to resolve sorting order when scores are the same.
+ """ + return ref_counts[sequence] / len(sequence), sequence + + ordered_sequences = sorted(ref_counts.keys(), key=ComputeScore, reverse=True) + offsets: Dict[bytes, int] = {} + current_offset = 0 + for sequence in ordered_sequences: + offsets[sequence] = current_offset + current_offset += len(sequence) + return b''.join(ordered_sequences), offsets + + +class EncodedAddressUnwind(NamedTuple): + """Record representing unwind information for an address within a function. + + This structure represents the same concept as `AddressUnwind`. The only + difference is that how to unwind from the address is represented as + encoded ARM unwind instructions. + + Attributes: + address_offset: The offset of the address from the start address of the + function. + complete_instruction_sequence: The full ARM unwind instruction sequence to + unwind from the `address_offset`. + """ + address_offset: int + complete_instruction_sequence: bytes + + +def EncodeAddressUnwinds(address_unwinds: Tuple[AddressUnwind, ...] + ) -> Tuple[EncodedAddressUnwind, ...]: + """Encodes the unwind instructions and offset for the addresses within a + function. + + Args: + address_unwinds: A tuple of unwind state for addresses within a function. + + Returns: + The encoded unwind instructions and offsets for the addresses within a + function, ordered by decreasing offset. + """ + sorted_address_unwinds: List[AddressUnwind] = sorted( + address_unwinds, + key=lambda address_unwind: address_unwind.address_offset, + reverse=True) + unwind_instructions: List[bytes] = [ + EncodeAddressUnwind(address_unwind) + for address_unwind in sorted_address_unwinds + ] + + # A complete instruction sequence contains all the unwind instructions + # necessary to unwind from an offset within a function. For a given offset + # this includes the offset's instructions plus the instructions for all + # earlier offsets. The offsets are stored in reverse order, hence the i: + # range rather than :i+1. + complete_instruction_sequences = [ + b''.join(unwind_instructions[i:]) for i in range(len(unwind_instructions)) + ] + + encoded_unwinds: List[EncodedAddressUnwind] = [] + for address_unwind, sequence in zip(sorted_address_unwinds, + complete_instruction_sequences): + encoded_unwinds.append( + EncodedAddressUnwind(address_unwind.address_offset, sequence)) + return tuple(encoded_unwinds) + + +class EncodedFunctionUnwind(NamedTuple): + """Record representing unwind information for a function. + + This structure represents the same concept as `FunctionUnwind`, but with + some differences: + - Attribute `address` is split into 2 attributes: `page_number` and + `page_offset`. + - Attribute `size` is dropped. + - Attribute `address_unwinds` becomes a collection of `EncodedAddressUnwind`s, + instead of a collection of `AddressUnwind`s. + + Attributes: + page_number: The upper bits (17 ~ 31bits) of byte offset from text section + start. + page_offset: The lower bits (1 ~ 16bits) of instruction offset from text + section start. + address_unwinds: A collection of `EncodedAddressUnwind`s. + + """ + + page_number: int + page_offset: int + address_unwinds: Tuple[EncodedAddressUnwind, ...] + + +# The trivial unwind is defined as a single `RETURN_TO_LR` instruction +# at the start of the function. +TRIVIAL_UNWIND: Tuple[EncodedAddressUnwind, ...] 
= EncodeAddressUnwinds( + (AddressUnwind(address_offset=0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=()), )) + +# The refuse to unwind filler unwind is used to fill the invalid space +# before the first function in the first page and after the last function +# in the last page. +REFUSE_TO_UNWIND: Tuple[EncodedAddressUnwind, ...] = (EncodedAddressUnwind( + address_offset=0, + complete_instruction_sequence=bytes([0b10000000, 0b00000000])), ) + + +def EncodeFunctionUnwinds(function_unwinds: Iterable[FunctionUnwind], + text_section_start_address: int + ) -> Iterable[EncodedFunctionUnwind]: + """Encodes the unwind state for all functions defined in the binary. + + This function + - sorts the collection of `FunctionUnwind`s by address. + - fills in gaps between functions with trivial unwind. + - fills the space in the last page after last function with refuse to unwind. + - fills the space in the first page before the first function with refuse + to unwind. + + Args: + function_unwinds: An iterable of function unwind states. + text_section_start_address: The address of .text section in ELF file. + + Returns: + The encoded function unwind states with no gaps between functions, ordered + by ascending address. + """ + + def GetPageNumber(address: int) -> int: + """Calculates the page number. + + Page number is calculated as byte_offset_from_text_section_start >> 17, + i.e. the upper bits (17 ~ 31bits) of byte offset from text section start. + """ + return (address - text_section_start_address) >> 17 + + def GetPageOffset(address: int) -> int: + """Calculates the page offset. + + Page offset is calculated as (byte_offset_from_text_section_start >> 1) + & 0xffff, i.e. the lower bits (1 ~ 16bits) of instruction offset from + text section start. + """ + return ((address - text_section_start_address) >> 1) & 0xffff + + sorted_function_unwinds: List[FunctionUnwind] = sorted( + function_unwinds, key=lambda function_unwind: function_unwind.address) + + if sorted_function_unwinds[0].address > text_section_start_address: + yield EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=REFUSE_TO_UNWIND) + + prev_func_end_address: int = sorted_function_unwinds[0].address + + gaps = 0 + for unwind in sorted_function_unwinds: + assert prev_func_end_address <= unwind.address, ( + 'Detected overlap between functions.') + + if prev_func_end_address < unwind.address: + # Gaps between functions are typically filled by regions of thunks which + # do not alter the stack pointer. Filling these gaps with TRIVIAL_UNWIND + # is the appropriate unwind strategy. + gaps += 1 + yield EncodedFunctionUnwind(GetPageNumber(prev_func_end_address), + GetPageOffset(prev_func_end_address), + TRIVIAL_UNWIND) + + yield EncodedFunctionUnwind(GetPageNumber(unwind.address), + GetPageOffset(unwind.address), + EncodeAddressUnwinds(unwind.address_unwinds)) + + prev_func_end_address = unwind.address + unwind.size + + if GetPageOffset(prev_func_end_address) != 0: + yield EncodedFunctionUnwind(GetPageNumber(prev_func_end_address), + GetPageOffset(prev_func_end_address), + REFUSE_TO_UNWIND) + + logging.info('%d/%d gaps between functions filled with trivial unwind.', gaps, + len(sorted_function_unwinds)) + + +def EncodeFunctionOffsetTable( + encoded_address_unwind_sequences: Iterable[ + Tuple[EncodedAddressUnwind, ...]], + unwind_instruction_table_offsets: Dict[bytes, int] +) -> Tuple[bytes, Dict[Tuple[EncodedAddressUnwind, ...], int]]: + """Encodes the function offset table. 
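+
+  Identical address unwind sequences are deduplicated: a repeated sequence
+  reuses the offset of its first occurrence instead of being encoded again.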
+
+  The function offset table maps the local instruction offset from the
+  function start to the corresponding location in the unwind instruction
+  table.
+
+  Args:
+    encoded_address_unwind_sequences: An iterable of encoded address unwind
+      sequences.
+    unwind_instruction_table_offsets: The offset mapping returned from
+      `EncodeUnwindInstructionTable`.
+
+  Returns:
+    A tuple containing:
+    - The function offset table as bytes.
+    - The mapping from the `EncodedAddressUnwind`s to the offset in the
+      function offset table. This mapping is used to construct the function
+      table, which references entries in the function offset table.
+  """
+  function_offset_table = bytearray()
+  offsets: Dict[Tuple[EncodedAddressUnwind, ...], int] = {}
+
+  for sequence in encoded_address_unwind_sequences:
+    if sequence in offsets:
+      continue
+
+    offsets[sequence] = len(function_offset_table)
+    for address_offset, complete_instruction_sequence in sequence:
+      # Note: address_offset is the number of bytes from one address to
+      # another, while the instruction_offset is the number of 2-byte
+      # instructions from one address to another.
+      instruction_offset = address_offset >> 1
+      function_offset_table += (
+          Uleb128Encode(instruction_offset) + Uleb128Encode(
+              unwind_instruction_table_offsets[complete_instruction_sequence]))
+
+  return bytes(function_offset_table), offsets
+
+
+def EncodePageTableAndFunctionTable(
+    function_unwinds: Iterable[EncodedFunctionUnwind],
+    function_offset_table_offsets: Dict[Tuple[EncodedAddressUnwind, ...], int]
+) -> Tuple[bytes, bytes]:
+  """Encodes the page table and the function table as bytes.
+
+  Page table:
+    A table that contains the mapping from page_number to the location of the
+    entry for the first function on the page in the function table.
+
+  Function table:
+    A table that contains the mapping from page_offset to the location of an
+    entry in the function offset table.
+
+  Args:
+    function_unwinds: All encoded function unwinds in the module.
+    function_offset_table_offsets: The offset mapping returned from
+      `EncodeFunctionOffsetTable`.
+
+  Returns:
+    A tuple containing:
+    - The page table as bytes.
+    - The function table as bytes.
+  """
+  page_function_unwinds: Dict[
+      int, List[EncodedFunctionUnwind]] = collections.defaultdict(list)
+  for function_unwind in function_unwinds:
+    page_function_unwinds[function_unwind.page_number].append(function_unwind)
+
+  raw_page_table: List[int] = []
+  function_table = bytearray()
+
+  for page_number, same_page_function_unwinds in sorted(
+      page_function_unwinds.items(), key=lambda item: item[0]):
+    # Pad empty pages.
+    # Empty pages can occur when a function spans over multiple pages.
+    # Example:
+    #   A page table whose starting function spans over 3 pages.
+    #   page_table:
+    #   [0, 1, 1, 1]
+    #   function_table:
+    #   [
+    #     # Page 0
+    #     (0, 20)  # This function spans from page 0 offset 0 to page 3
+    #              # offset 5.
+    #     # Page 1 is empty.
+    #     # Page 2 is empty.
+    #     # Page 3
+    #     (6, 70)
+    #   ]
+    assert page_number > len(raw_page_table) - 1
+    number_of_empty_pages = page_number - len(raw_page_table)
+    # The function table is represented as `base::FunctionTableEntry[]`,
+    # where `base::FunctionTableEntry` is 4 bytes.
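+    # Dividing the byte length by 4 therefore yields the index of the next
+    # entry to be written, which is where this page's first function lands.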
+    function_table_index = len(function_table) // 4
+    raw_page_table.extend([function_table_index] * (number_of_empty_pages + 1))
+    assert page_number == len(raw_page_table) - 1
+
+    for function_unwind in sorted(
+        same_page_function_unwinds,
+        key=lambda function_unwind: function_unwind.page_offset):
+      function_table += struct.pack(
+          'HH', function_unwind.page_offset,
+          function_offset_table_offsets[function_unwind.address_unwinds])
+
+  page_table = struct.pack(f'{len(raw_page_table)}I', *raw_page_table)
+
+  return page_table, bytes(function_table)
+
+
+ALL_PARSERS: Tuple[UnwindInstructionsParser, ...] = (
+    NullParser(),
+    PushOrSubSpParser(),
+    StoreSpParser(),
+    VPushParser(),
+)
+
+
+def ParseAddressCfi(address_cfi: AddressCfi, function_start_address: int,
+                    parsers: Tuple[UnwindInstructionsParser, ...],
+                    prev_cfa_sp_offset: int
+                    ) -> Tuple[Union[AddressUnwind, None], bool, int]:
+  """Parses address CFI with the given parsers.
+
+  Args:
+    address_cfi: The CFI for an address in the function.
+    function_start_address: The start address of the function.
+    parsers: Available parsers to try on the CFI data.
+    prev_cfa_sp_offset: Previous CFA stack pointer offset.
+
+  Returns:
+    A tuple containing:
+    - An `AddressUnwind` object when the parse is successful, None otherwise.
+    - Whether the address is in the function epilogue.
+    - The new cfa_sp_offset.
+  """
+  for parser in parsers:
+    match = parser.GetBreakpadInstructionsRegex().search(
+        address_cfi.unwind_instructions)
+    if not match:
+      continue
+
+    address_unwind, cfa_sp_offset = parser.ParseFromMatch(
+        address_cfi.address - function_start_address, prev_cfa_sp_offset, match)
+
+    in_epilogue = (
+        prev_cfa_sp_offset > cfa_sp_offset
+        and address_unwind.unwind_type != UnwindType.RESTORE_SP_FROM_REGISTER)
+
+    return (address_unwind if not in_epilogue else None, in_epilogue,
+            cfa_sp_offset)
+
+  return None, False, prev_cfa_sp_offset
+
+
+def GenerateUnwinds(function_cfis: Iterable[FunctionCfi],
+                    parsers: Tuple[UnwindInstructionsParser, ...]
+                    ) -> Iterable[FunctionUnwind]:
+  """Generates parsed function unwind states from breakpad CFI data.
+
+  This function parses `FunctionCfi`s to `FunctionUnwind`s using
+  `UnwindInstructionsParser`s.
+
+  Args:
+    function_cfis: An iterable of function CFI data.
+    parsers: Available parsers to try on the CFI address data.
+
+  Returns:
+    An iterable of parsed function unwind states.
+  """
+  functions = 0
+  addresses = 0
+  handled_addresses = 0
+  epilogues_seen = 0
+
+  for function_cfi in function_cfis:
+    functions += 1
+    address_unwinds: List[AddressUnwind] = []
+    cfa_sp_offset = 0
+    for address_cfi in function_cfi.address_cfi:
+      addresses += 1
+
+      address_unwind, in_epilogue, cfa_sp_offset = ParseAddressCfi(
+          address_cfi, function_cfi.address_cfi[0].address, parsers,
+          cfa_sp_offset)
+
+      if address_unwind:
+        handled_addresses += 1
+        address_unwinds.append(address_unwind)
+        continue
+
+      if in_epilogue:
+        epilogues_seen += 1
+        break
+
+      logging.info('unrecognized CFI: %x %s.', address_cfi.address,
+                   address_cfi.unwind_instructions)
+
+    if address_unwinds:
+      # We expect the unwind information for every function to start with a
+      # trivial unwind (RETURN_TO_LR), prior to the execution of any code in
+      # the function. This is required by the ARM calling convention, which
+      # sets lr to the return address when calling into a function.
+      assert address_unwinds[0].address_offset == 0
+      assert address_unwinds[0].unwind_type == UnwindType.RETURN_TO_LR
+
+      yield FunctionUnwind(function_cfi.address_cfi[0].address,
+                           function_cfi.size, tuple(address_unwinds))
+
+  logging.info('%d functions.', functions)
+  logging.info('%d/%d addresses handled.', handled_addresses, addresses)
+  logging.info('epilogues_seen: %d.', epilogues_seen)
+
+
+def EncodeUnwindInfo(page_table: bytes, function_table: bytes,
+                     function_offset_table: bytes,
+                     unwind_instruction_table: bytes) -> bytes:
+  """Encodes all unwind tables as a single binary.
+
+  Concatenates all unwind table binaries and attaches a header at the start
+  with an offset-size pair for each table.
+
+  offset: The offset of the target table from the start of the unwind info
+    binary, in bytes.
+  size: The declared size of the target table.
+
+  Both offset and size are represented as 32-bit integers.
+  See `base::ChromeUnwindInfoHeaderAndroid` for more details.
+
+  Args:
+    page_table: The page table as bytes.
+    function_table: The function table as bytes.
+    function_offset_table: The function offset table as bytes.
+    unwind_instruction_table: The unwind instruction table as bytes.
+
+  Returns:
+    A single binary containing
+    - A header that points to the location of each table.
+    - All unwind tables.
+  """
+  unwind_info_header = bytearray()
+  # Each table is represented as an (offset, size) pair; both offset and size
+  # are 4-byte integers.
+  unwind_info_header_size = 4 * 2 * 4
+  unwind_info_body = bytearray()
+
+  # Both the page table and the function table need to be aligned because
+  # their contents are interpreted as multi-byte integers. However, the byte
+  # sizes of the header, the page table and the function table are all
+  # multiples of 4, and the resource will be memory mapped at a 4-byte
+  # boundary, so no extra care is required to align the page table and the
+  # function table.
+  #
+  # The function offset table and the unwind instruction table are accessed
+  # byte by byte, so they only need 1-byte alignment.
+
+  assert len(page_table) % 4 == 0, (
+      'Each entry in the page table should be a 4-byte integer.')
+  assert len(function_table) % 4 == 0, (
+      'Each entry in the function table should be a pair of 2-byte integers.')
+
+  for table in page_table, function_table:
+    offset = unwind_info_header_size + len(unwind_info_body)
+    # For the page table and the function table, the declared size is the
+    # number of entries in each table. The tables will be aligned to a 4-byte
+    # boundary because the resource will be memory mapped at a 4-byte boundary
+    # and the header is a multiple of 4 bytes.
+    declared_size = len(table) // 4
+    unwind_info_header += struct.pack('II', offset, declared_size)
+    unwind_info_body += table
+
+  for table in function_offset_table, unwind_instruction_table:
+    offset = unwind_info_header_size + len(unwind_info_body)
+    # Because both the function offset table and the unwind instruction table
+    # contain variable-length encoded numbers, the declared size is simply the
+    # number of bytes in each table. The tables only require 1-byte alignment.
+    declared_size = len(table)
+    unwind_info_header += struct.pack('II', offset, declared_size)
+    unwind_info_body += table
+
+  return bytes(unwind_info_header + unwind_info_body)
+
+
+def GenerateUnwindTables(
+    encoded_function_unwinds_iterable: Iterable[EncodedFunctionUnwind]
+) -> Tuple[bytes, bytes, bytes, bytes]:
+  """Generates all unwind tables as bytes.
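+
+  This is a thin orchestration step: it gathers the complete instruction
+  sequences, builds the unwind instruction table, then derives the function
+  offset table and finally the page and function tables from it.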
+
+  Args:
+    encoded_function_unwinds_iterable: Encoded function unwinds for all
+      functions in the ELF binary.
+
+  Returns:
+    A tuple containing:
+    - The page table as bytes.
+    - The function table as bytes.
+    - The function offset table as bytes.
+    - The unwind instruction table as bytes.
+  """
+  encoded_function_unwinds: List[EncodedFunctionUnwind] = list(
+      encoded_function_unwinds_iterable)
+  complete_instruction_sequences: List[bytes] = []
+  encoded_address_unwind_sequences: List[Tuple[EncodedAddressUnwind, ...]] = []
+
+  for encoded_function_unwind in encoded_function_unwinds:
+    encoded_address_unwind_sequences.append(
+        encoded_function_unwind.address_unwinds)
+    for address_unwind in encoded_function_unwind.address_unwinds:
+      complete_instruction_sequences.append(
+          address_unwind.complete_instruction_sequence)
+
+  unwind_instruction_table, unwind_instruction_table_offsets = (
+      EncodeUnwindInstructionTable(complete_instruction_sequences))
+
+  function_offset_table, function_offset_table_offsets = (
+      EncodeFunctionOffsetTable(encoded_address_unwind_sequences,
+                                unwind_instruction_table_offsets))
+
+  page_table, function_table = EncodePageTableAndFunctionTable(
+      encoded_function_unwinds, function_offset_table_offsets)
+
+  return (page_table, function_table, function_offset_table,
+          unwind_instruction_table)
+
+
+def ReadTextSectionStartAddress(readobj_path: str, libchrome_path: str) -> int:
+  """Reads the .text section start address of the libchrome ELF.
+
+  Args:
+    readobj_path: Path to the llvm-readobj binary.
+    libchrome_path: Path to the libchrome binary.
+
+  Returns:
+    The text section start address as a number.
+  """
+  def GetSectionName(section) -> str:
+    # See crbug.com/1426287 for context on different JSON names.
+    if 'Name' in section['Section']['Name']:
+      return section['Section']['Name']['Name']
+    return section['Section']['Name']['Value']
+
+  proc = subprocess.Popen(
+      [readobj_path, '--sections', '--elf-output-style=JSON', libchrome_path],
+      stdout=subprocess.PIPE,
+      encoding='ascii')
+
+  elfs = json.loads(proc.stdout.read())[0]
+  sections = elfs['Sections']
+
+  return next(s['Section']['Address'] for s in sections
+              if GetSectionName(s) == '.text')
+
+
+def main():
+  build_utils.InitLogging('CREATE_UNWIND_TABLE_DEBUG')
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--input_path',
+                      help='Path to the unstripped binary.',
+                      required=True,
+                      metavar='FILE')
+  parser.add_argument('--output_path',
+                      help='Path to unwind info binary output.',
+                      required=True,
+                      metavar='FILE')
+  parser.add_argument('--dump_syms_path',
+                      required=True,
+                      help='The path of the dump_syms binary.',
+                      metavar='FILE')
+  parser.add_argument('--readobj_path',
+                      required=True,
+                      help='The path of the llvm-readobj binary.',
+                      metavar='FILE')
+
+  args = parser.parse_args()
+  proc = subprocess.Popen(['./' + args.dump_syms_path, args.input_path, '-v'],
+                          stdout=subprocess.PIPE,
+                          encoding='ascii')
+
+  function_cfis = ReadFunctionCfi(proc.stdout)
+  function_unwinds = GenerateUnwinds(function_cfis, parsers=ALL_PARSERS)
+  encoded_function_unwinds = EncodeFunctionUnwinds(
+      function_unwinds,
+      ReadTextSectionStartAddress(args.readobj_path, args.input_path))
+  (page_table, function_table, function_offset_table,
+   unwind_instruction_table) = GenerateUnwindTables(encoded_function_unwinds)
+  unwind_info: bytes = EncodeUnwindInfo(page_table, function_table,
+                                        function_offset_table,
+                                        unwind_instruction_table)
+
+  if proc.wait():
+    logging.critical('dump_syms exited with return code %d',
proc.returncode) + sys.exit(proc.returncode) + + with open(args.output_path, 'wb') as f: + f.write(unwind_info) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/create_unwind_table_tests.py b/build/android/gyp/create_unwind_table_tests.py new file mode 100755 index 000000000000..14fbc227ff2c --- /dev/null +++ b/build/android/gyp/create_unwind_table_tests.py @@ -0,0 +1,1182 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Tests for create_unwind_table.py. + +This test suite contains tests for the custom unwind table creation for 32-bit +arm builds. +""" + +import io +import struct + +import unittest +import unittest.mock +import re + +from create_unwind_table import ( + AddressCfi, AddressUnwind, FilterToNonTombstoneCfi, FunctionCfi, + FunctionUnwind, EncodeAddressUnwind, EncodeAddressUnwinds, + EncodedAddressUnwind, EncodeAsBytes, EncodeFunctionOffsetTable, + EncodedFunctionUnwind, EncodeFunctionUnwinds, EncodeStackPointerUpdate, + EncodePop, EncodePageTableAndFunctionTable, EncodeUnwindInfo, + EncodeUnwindInstructionTable, GenerateUnwinds, GenerateUnwindTables, + NullParser, ParseAddressCfi, PushOrSubSpParser, ReadFunctionCfi, + REFUSE_TO_UNWIND, StoreSpParser, TRIVIAL_UNWIND, Uleb128Encode, + UnwindInstructionsParser, UnwindType, VPushParser) + + +class _TestReadFunctionCfi(unittest.TestCase): + def testFilterTombstone(self): + input_lines = [ + 'file name', + 'STACK CFI INIT 0 ', + 'STACK CFI 100 ', + 'STACK CFI INIT 1 ', + 'STACK CFI 200 ', + ] + + f = io.StringIO(''.join(line + '\n' for line in input_lines)) + + self.assertEqual([ + 'STACK CFI INIT 1 \n', + 'STACK CFI 200 \n', + ], list(FilterToNonTombstoneCfi(f))) + + def testReadFunctionCfiTombstoneFiltered(self): + input_lines = [ + 'STACK CFI INIT 0 50 .cfa: sp 0 + .ra: lr', # Tombstone function. 
+ 'STACK CFI 2 .cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ ' + 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^', + 'STACK CFI INIT 15b6490 4 .cfa: sp 0 + .ra: lr', + ] + + f = io.StringIO(''.join(line + '\n' for line in input_lines)) + + self.assertEqual( + [FunctionCfi(4, (AddressCfi(0x15b6490, '.cfa: sp 0 + .ra: lr'), ))], + list(ReadFunctionCfi(f))) + + def testReadFunctionCfiSingleFunction(self): + input_lines = [ + 'STACK CFI INIT 15b6490 4 .cfa: sp 0 + .ra: lr', + 'STACK CFI 2 .cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ ' + 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^', + ] + + f = io.StringIO(''.join(line + '\n' for line in input_lines)) + + self.assertEqual([ + FunctionCfi(4, ( + AddressCfi(0x15b6490, '.cfa: sp 0 + .ra: lr'), + AddressCfi( + 0x2, '.cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ ' + 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^'), + )) + ], list(ReadFunctionCfi(f))) + + def testReadFunctionCfiMultipleFunctions(self): + input_lines = [ + 'STACK CFI INIT 15b6490 4 .cfa: sp 0 + .ra: lr', + 'STACK CFI 2 .cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ ' + 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^', + 'STACK CFI INIT 15b655a 26 .cfa: sp 0 + .ra: lr', + 'STACK CFI 15b655c .cfa: sp 8 + .ra: .cfa - 4 + ^ r4: .cfa - 8 + ^', + ] + + f = io.StringIO(''.join(line + '\n' for line in input_lines)) + + self.assertEqual([ + FunctionCfi(0x4, ( + AddressCfi(0x15b6490, '.cfa: sp 0 + .ra: lr'), + AddressCfi( + 0x2, '.cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ ' + 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^'), + )), + FunctionCfi(0x26, ( + AddressCfi(0x15b655a, '.cfa: sp 0 + .ra: lr'), + AddressCfi(0x15b655c, + '.cfa: sp 8 + .ra: .cfa - 4 + ^ r4: .cfa - 8 + ^'), + )), + ], list(ReadFunctionCfi(f))) + + +class _TestEncodeAsBytes(unittest.TestCase): + def testOutOfBounds(self): + self.assertRaises(ValueError, lambda: EncodeAsBytes(1024)) + self.assertRaises(ValueError, lambda: EncodeAsBytes(256)) + self.assertRaises(ValueError, lambda: EncodeAsBytes(-1)) + + def testEncode(self): + self.assertEqual(bytes([0]), EncodeAsBytes(0)) + self.assertEqual(bytes([255]), EncodeAsBytes(255)) + self.assertEqual(bytes([0, 1]), EncodeAsBytes(0, 1)) + + +class _TestUleb128Encode(unittest.TestCase): + def testNegativeValue(self): + self.assertRaises(ValueError, lambda: Uleb128Encode(-1)) + + def testSingleByte(self): + self.assertEqual(bytes([0]), Uleb128Encode(0)) + self.assertEqual(bytes([1]), Uleb128Encode(1)) + self.assertEqual(bytes([127]), Uleb128Encode(127)) + + def testMultiBytes(self): + self.assertEqual(bytes([0b10000000, 0b1]), Uleb128Encode(128)) + self.assertEqual(bytes([0b10000000, 0b10000000, 0b1]), + Uleb128Encode(128**2)) + + +class _TestEncodeStackPointerUpdate(unittest.TestCase): + def testSingleByte(self): + self.assertEqual(bytes([0b00000000 | 0]), EncodeStackPointerUpdate(4)) + self.assertEqual(bytes([0b01000000 | 0]), EncodeStackPointerUpdate(-4)) + + self.assertEqual(bytes([0b00000000 | 0b00111111]), + EncodeStackPointerUpdate(0x100)) + self.assertEqual(bytes([0b01000000 | 0b00111111]), + EncodeStackPointerUpdate(-0x100)) + + self.assertEqual(bytes([0b00000000 | 3]), EncodeStackPointerUpdate(16)) + self.assertEqual(bytes([0b01000000 | 3]), EncodeStackPointerUpdate(-16)) + + self.assertEqual(bytes([0b00111111]), EncodeStackPointerUpdate(0x100)) + + # 10110010 uleb128 + # vsp = vsp + 0x204 + (uleb128 << 2) + self.assertEqual(bytes([0b10110010, 0b00000000]), + EncodeStackPointerUpdate(0x204)) + self.assertEqual(bytes([0b10110010, 0b00000001]), + EncodeStackPointerUpdate(0x208)) + + # For vsp increments of 
0x104-0x200, use 00xxxxxx twice. + self.assertEqual(bytes([0b00111111, 0b00000000]), + EncodeStackPointerUpdate(0x104)) + self.assertEqual(bytes([0b00111111, 0b00111111]), + EncodeStackPointerUpdate(0x200)) + self.assertEqual(bytes([0b01111111, 0b01111111]), + EncodeStackPointerUpdate(-0x200)) + + # Not multiple of 4. + self.assertRaises(AssertionError, lambda: EncodeStackPointerUpdate(101)) + # offset=0 is meaningless. + self.assertRaises(AssertionError, lambda: EncodeStackPointerUpdate(0)) + + +class _TestEncodePop(unittest.TestCase): + def testSingleRegister(self): + # Should reject registers outside r4 ~ r15 range. + for r in 0, 1, 2, 3, 16: + self.assertRaises(AssertionError, lambda: EncodePop([r])) + # Should use + # 1000iiii iiiiiiii + # Pop up to 12 integer registers under masks {r15-r12}, {r11-r4}. + self.assertEqual(bytes([0b10000000, 0b00000001]), EncodePop([4])) + self.assertEqual(bytes([0b10000000, 0b00001000]), EncodePop([7])) + self.assertEqual(bytes([0b10000100, 0b00000000]), EncodePop([14])) + self.assertEqual(bytes([0b10001000, 0b00000000]), EncodePop([15])) + + def testContinuousRegisters(self): + # 10101nnn + # Pop r4-r[4+nnn], r14. + self.assertEqual(bytes([0b10101000]), EncodePop([4, 14])) + self.assertEqual(bytes([0b10101001]), EncodePop([4, 5, 14])) + self.assertEqual(bytes([0b10101111]), + EncodePop([4, 5, 6, 7, 8, 9, 10, 11, 14])) + + def testDiscontinuousRegisters(self): + # 1000iiii iiiiiiii + # Pop up to 12 integer registers under masks {r15-r12}, {r11-r4}. + self.assertEqual(bytes([0b10001000, 0b00000001]), EncodePop([4, 15])) + self.assertEqual(bytes([0b10000100, 0b00011000]), EncodePop([7, 8, 14])) + self.assertEqual(bytes([0b10000111, 0b11111111]), + EncodePop([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14])) + self.assertEqual(bytes([0b10000100, 0b10111111]), + EncodePop([4, 5, 6, 7, 8, 9, 11, 14])) + + +class _TestEncodeAddressUnwind(unittest.TestCase): + def testReturnToLr(self): + self.assertEqual( + bytes([0b10110000]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=tuple()))) + + def testNoAction(self): + self.assertEqual( + bytes([]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.NO_ACTION, + sp_offset=0, + registers=tuple()))) + + def testUpdateSpAndOrPopRegisters(self): + self.assertEqual( + bytes([0b0, 0b10101000]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0x4, + registers=(4, 14)))) + + self.assertEqual( + bytes([0b0]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0x4, + registers=tuple()))) + + self.assertEqual( + bytes([0b10101000]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(4, 14)))) + + def testRestoreSpFromRegisters(self): + self.assertEqual( + bytes([0b10010100, 0b0]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.RESTORE_SP_FROM_REGISTER, + sp_offset=0x4, + registers=(4, )))) + + self.assertEqual( + bytes([0b10010100]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.RESTORE_SP_FROM_REGISTER, + sp_offset=0, + registers=(4, )))) + + self.assertRaises( + AssertionError, lambda: EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.RESTORE_SP_FROM_REGISTER, + sp_offset=0x4, + 
registers=tuple()))) + + +class _TestEncodeAddressUnwinds(unittest.TestCase): + def testEncodeOrder(self): + address_unwind1 = AddressUnwind(address_offset=0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=tuple()) + address_unwind2 = AddressUnwind( + address_offset=4, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(4, 14)) + + def MockEncodeAddressUnwind(address_unwind): + return { + address_unwind1: bytes([1]), + address_unwind2: bytes([2]), + }[address_unwind] + + with unittest.mock.patch("create_unwind_table.EncodeAddressUnwind", + side_effect=MockEncodeAddressUnwind): + encoded_unwinds = EncodeAddressUnwinds((address_unwind1, address_unwind2)) + self.assertEqual(( + EncodedAddressUnwind(4, + bytes([2]) + bytes([1])), + EncodedAddressUnwind(0, bytes([1])), + ), encoded_unwinds) + + +PAGE_SIZE = 1 << 17 + + +class _TestEncodeFunctionUnwinds(unittest.TestCase): + @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds') + def testEncodeOrder(self, MockEncodeAddressUnwinds): + MockEncodeAddressUnwinds.return_value = EncodedAddressUnwind(0, b'\x00') + + self.assertEqual([ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=EncodedAddressUnwind(0, b'\x00')), + EncodedFunctionUnwind(page_number=0, + page_offset=100 >> 1, + address_unwinds=EncodedAddressUnwind(0, b'\x00')), + ], + list( + EncodeFunctionUnwinds([ + FunctionUnwind(address=100, + size=PAGE_SIZE - 100, + address_unwinds=()), + FunctionUnwind( + address=0, size=100, address_unwinds=()), + ], + text_section_start_address=0))) + + @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds') + def testFillingGaps(self, MockEncodeAddressUnwinds): + MockEncodeAddressUnwinds.return_value = EncodedAddressUnwind(0, b'\x00') + + self.assertEqual([ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=EncodedAddressUnwind(0, b'\x00')), + EncodedFunctionUnwind( + page_number=0, page_offset=50 >> 1, address_unwinds=TRIVIAL_UNWIND), + EncodedFunctionUnwind(page_number=0, + page_offset=100 >> 1, + address_unwinds=EncodedAddressUnwind(0, b'\x00')), + ], + list( + EncodeFunctionUnwinds([ + FunctionUnwind( + address=0, size=50, address_unwinds=()), + FunctionUnwind(address=100, + size=PAGE_SIZE - 100, + address_unwinds=()), + ], + text_section_start_address=0))) + + @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds') + def testFillingLastPage(self, MockEncodeAddressUnwinds): + MockEncodeAddressUnwinds.return_value = EncodedAddressUnwind(0, b'\x00') + + self.assertEqual( + [ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=EncodedAddressUnwind( + 0, b'\x00')), + EncodedFunctionUnwind(page_number=0, + page_offset=100 >> 1, + address_unwinds=EncodedAddressUnwind( + 0, b'\x00')), + EncodedFunctionUnwind(page_number=0, + page_offset=200 >> 1, + address_unwinds=REFUSE_TO_UNWIND), + ], + list( + EncodeFunctionUnwinds([ + FunctionUnwind(address=1100, size=100, address_unwinds=()), + FunctionUnwind(address=1200, size=100, address_unwinds=()), + ], + text_section_start_address=1100))) + + @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds') + def testFillingFirstPage(self, MockEncodeAddressUnwinds): + MockEncodeAddressUnwinds.return_value = EncodedAddressUnwind(0, b'\x00') + + self.assertEqual( + [ + EncodedFunctionUnwind( + page_number=0, page_offset=0, address_unwinds=REFUSE_TO_UNWIND), + EncodedFunctionUnwind(page_number=0, + page_offset=100 >> 1, + address_unwinds=EncodedAddressUnwind( + 0, 
b'\x00')), + EncodedFunctionUnwind(page_number=0, + page_offset=200 >> 1, + address_unwinds=EncodedAddressUnwind( + 0, b'\x00')), + EncodedFunctionUnwind(page_number=0, + page_offset=300 >> 1, + address_unwinds=REFUSE_TO_UNWIND), + ], + list( + EncodeFunctionUnwinds([ + FunctionUnwind(address=1100, size=100, address_unwinds=()), + FunctionUnwind(address=1200, size=100, address_unwinds=()), + ], + text_section_start_address=1000))) + + @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds') + def testOverlappedFunctions(self, _): + self.assertRaises( + # Eval generator with `list`. Otherwise the code will not execute. + AssertionError, + lambda: list( + EncodeFunctionUnwinds([ + FunctionUnwind(address=0, size=100, address_unwinds=()), + FunctionUnwind(address=50, size=100, address_unwinds=()), + ], + text_section_start_address=0))) + + +class _TestNullParser(unittest.TestCase): + def testCfaChange(self): + parser = NullParser() + match = parser.GetBreakpadInstructionsRegex().search('.cfa: sp 0 + .ra: lr') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=0, + cfa_sp_offset=0, + match=match) + + self.assertEqual(0, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=()), address_unwind) + + +class _TestPushOrSubSpParser(unittest.TestCase): + def testCfaChange(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search('.cfa: sp 4 +') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(4, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=4, + registers=()), address_unwind) + + def testCfaAndRaChangePopOnly(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 4 + .ra: .cfa -4 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(4, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(14, )), address_unwind) + + def testCfaAndRaChangePopAndSpUpdate(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 8 + .ra: .cfa -4 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(8, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=4, + registers=(14, )), address_unwind) + + def testCfaAndRaAndRegistersChangePopOnly(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 12 + .ra: .cfa -4 + ^ r4: .cfa -12 + ^ r7: .cfa -8 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(12, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(4, 7, 14)), address_unwind) + + def testCfaAndRaAndRegistersChangePopAndSpUpdate(self): + 
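+    # The CFA moves by 16 bytes while only 3 registers (12 bytes) are popped,
+    # so the remaining 4 bytes must become an explicit sp update.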
parser = PushOrSubSpParser()
+    match = parser.GetBreakpadInstructionsRegex().search(
+        '.cfa: sp 16 + .ra: .cfa -4 + ^ r4: .cfa -12 + ^ r7: .cfa -8 + ^')
+    self.assertIsNotNone(match)
+
+    address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20,
+                                                              cfa_sp_offset=0,
+                                                              match=match)
+
+    self.assertEqual(16, new_cfa_sp_offset)
+    self.assertEqual(
+        AddressUnwind(address_offset=20,
+                      unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS,
+                      sp_offset=4,
+                      registers=(4, 7, 14)), address_unwind)
+
+  def testRegistersChange(self):
+    parser = PushOrSubSpParser()
+    match = parser.GetBreakpadInstructionsRegex().search(
+        'r4: .cfa -8 + ^ r7: .cfa -4 + ^')
+    self.assertIsNotNone(match)
+
+    address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20,
+                                                              cfa_sp_offset=0,
+                                                              match=match)
+
+    self.assertEqual(0, new_cfa_sp_offset)
+    self.assertEqual(
+        AddressUnwind(address_offset=20,
+                      unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS,
+                      sp_offset=0,
+                      registers=(4, 7)), address_unwind)
+
+  def testCfaAndRegistersChange(self):
+    parser = PushOrSubSpParser()
+    match = parser.GetBreakpadInstructionsRegex().search(
+        '.cfa: sp 8 + r4: .cfa -8 + ^ r7: .cfa -4 + ^')
+    self.assertIsNotNone(match)
+
+    address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20,
+                                                              cfa_sp_offset=0,
+                                                              match=match)
+
+    self.assertEqual(8, new_cfa_sp_offset)
+    self.assertEqual(
+        AddressUnwind(address_offset=20,
+                      unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS,
+                      sp_offset=0,
+                      registers=(4, 7)), address_unwind)
+
+  def testRegistersOrdering(self):
+    parser = PushOrSubSpParser()
+    match = parser.GetBreakpadInstructionsRegex().search(
+        'r10: .cfa -8 + ^ r7: .cfa -4 + ^')
+    self.assertIsNotNone(match)
+
+    address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20,
+                                                              cfa_sp_offset=0,
+                                                              match=match)
+
+    self.assertEqual(0, new_cfa_sp_offset)
+    self.assertEqual(
+        AddressUnwind(address_offset=20,
+                      unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS,
+                      sp_offset=0,
+                      registers=(7, 10)), address_unwind)
+
+  def testPoppingCallerSaveRegisters(self):
+    """Regression test for pop unwinds that encode caller-save registers.
+
+    Caller-save registers: r0 ~ r3.
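+
+    ARM unwind instructions can only pop r4 ~ r15, so r3 is dropped from the
+    pop mask and its 4 bytes are folded into the stack pointer update instead.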
+ """ + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 16 + .ra: .cfa -4 + ^ ' + 'r3: .cfa -16 + ^ r4: .cfa -12 + ^ r5: .cfa -8 + ^') + + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(16, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=4, + registers=(4, 5, 14)), address_unwind) + + +class _TestVPushParser(unittest.TestCase): + def testCfaAndRegistersChange(self): + parser = VPushParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 40 + unnamed_register264: .cfa -40 + ^ ' + 'unnamed_register265: .cfa -32 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=24, + match=match) + + self.assertEqual(40, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=16, + registers=()), address_unwind) + + def testRegistersChange(self): + parser = VPushParser() + match = parser.GetBreakpadInstructionsRegex().search( + 'unnamed_register264: .cfa -40 + ^ unnamed_register265: .cfa -32 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=24, + match=match) + + self.assertEqual(24, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.NO_ACTION, + sp_offset=0, + registers=()), address_unwind) + + +class _TestStoreSpParser(unittest.TestCase): + def testCfaAndRegistersChange(self): + parser = StoreSpParser() + match = parser.GetBreakpadInstructionsRegex().search('.cfa: r7 8 +') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=12, + match=match) + + self.assertEqual(8, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.RESTORE_SP_FROM_REGISTER, + sp_offset=-4, + registers=(7, )), address_unwind) + + +class _TestEncodeUnwindInstructionTable(unittest.TestCase): + def testSingleEntry(self): + table, offsets = EncodeUnwindInstructionTable([bytes([3])]) + + self.assertEqual(bytes([3]), table) + self.assertDictEqual({ + bytes([3]): 0, + }, offsets) + + def testMultipleEntries(self): + self.maxDiff = None + # Result should be sorted by score descending. + table, offsets = EncodeUnwindInstructionTable([ + bytes([1, 2, 3]), + bytes([0, 3]), + bytes([3]), + ]) + self.assertEqual(bytes([3, 0, 3, 1, 2, 3]), table) + self.assertDictEqual( + { + bytes([1, 2, 3]): 3, # score = 1 / 3 = 0.67 + bytes([0, 3]): 1, # score = 1 / 2 = 0.5 + bytes([3]): 0, # score = 1 / 1 = 1 + }, + offsets) + + # When scores are same, sort by sequence descending. 
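+    # All three scores below equal 1, and bytes([3]) > bytes([1, 2, 3]) >
+    # bytes([0, 3]) lexicographically, which determines the resulting layout.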
+ table, offsets = EncodeUnwindInstructionTable([ + bytes([3]), + bytes([0, 3]), + bytes([0, 3]), + bytes([1, 2, 3]), + bytes([1, 2, 3]), + bytes([1, 2, 3]), + ]) + self.assertEqual(bytes([3, 1, 2, 3, 0, 3]), table) + self.assertDictEqual( + { + bytes([3]): 0, # score = 1 / 1 = 1 + bytes([1, 2, 3]): 1, # score = 3 / 3 = 1 + bytes([0, 3]): 4, # score = 2 / 2 = 1 + }, + offsets) + + +class _TestFunctionOffsetTable(unittest.TestCase): + def testSingleEntry(self): + self.maxDiff = None + complete_instruction_sequence0 = bytes([3]) + complete_instruction_sequence1 = bytes([1, 3]) + + sequence1 = ( + EncodedAddressUnwind(0x400, complete_instruction_sequence1), + EncodedAddressUnwind(0x0, complete_instruction_sequence0), + ) + + address_unwind_sequences = [sequence1] + + table, offsets = EncodeFunctionOffsetTable( + address_unwind_sequences, { + complete_instruction_sequence0: 52, + complete_instruction_sequence1: 50, + }) + + self.assertEqual( + bytes([ + # (0x200, 50) + 128, + 4, + 50, + # (0, 52) + 0, + 52, + ]), + table) + + self.assertDictEqual({ + sequence1: 0, + }, offsets) + + def testMultipleEntry(self): + self.maxDiff = None + complete_instruction_sequence0 = bytes([3]) + complete_instruction_sequence1 = bytes([1, 3]) + complete_instruction_sequence2 = bytes([2, 3]) + + sequence1 = ( + EncodedAddressUnwind(0x20, complete_instruction_sequence1), + EncodedAddressUnwind(0x0, complete_instruction_sequence0), + ) + sequence2 = ( + EncodedAddressUnwind(0x400, complete_instruction_sequence2), + EncodedAddressUnwind(0x0, complete_instruction_sequence0), + ) + address_unwind_sequences = [sequence1, sequence2] + + table, offsets = EncodeFunctionOffsetTable( + address_unwind_sequences, { + complete_instruction_sequence0: 52, + complete_instruction_sequence1: 50, + complete_instruction_sequence2: 80, + }) + + self.assertEqual( + bytes([ + # (0x10, 50) + 0x10, + 50, + # (0, 52) + 0, + 52, + # (0x200, 80) + 128, + 4, + 80, + # (0, 52) + 0, + 52, + ]), + table) + + self.assertDictEqual({ + sequence1: 0, + sequence2: 4, + }, offsets) + + def testDuplicatedEntry(self): + self.maxDiff = None + complete_instruction_sequence0 = bytes([3]) + complete_instruction_sequence1 = bytes([1, 3]) + complete_instruction_sequence2 = bytes([2, 3]) + + sequence1 = ( + EncodedAddressUnwind(0x20, complete_instruction_sequence1), + EncodedAddressUnwind(0x0, complete_instruction_sequence0), + ) + sequence2 = ( + EncodedAddressUnwind(0x400, complete_instruction_sequence2), + EncodedAddressUnwind(0x0, complete_instruction_sequence0), + ) + sequence3 = sequence1 + + address_unwind_sequences = [sequence1, sequence2, sequence3] + + table, offsets = EncodeFunctionOffsetTable( + address_unwind_sequences, { + complete_instruction_sequence0: 52, + complete_instruction_sequence1: 50, + complete_instruction_sequence2: 80, + }) + + self.assertEqual( + bytes([ + # (0x10, 50) + 0x10, + 50, + # (0, 52) + 0, + 52, + # (0x200, 80) + 128, + 4, + 80, + # (0, 52) + 0, + 52, + ]), + table) + + self.assertDictEqual({ + sequence1: 0, + sequence2: 4, + }, offsets) + + +class _TestEncodePageTableAndFunctionTable(unittest.TestCase): + def testMultipleFunctionUnwinds(self): + address_unwind_sequence0 = ( + EncodedAddressUnwind(0x10, bytes([0, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + address_unwind_sequence1 = ( + EncodedAddressUnwind(0x10, bytes([1, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + address_unwind_sequence2 = ( + EncodedAddressUnwind(0x200, bytes([2, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + + 
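+    # Two functions fall on page 0 and one on page 1, so the page table must
+    # point page 1 at the third function table entry (index 2).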
function_unwinds = [ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=address_unwind_sequence0), + EncodedFunctionUnwind(page_number=0, + page_offset=0x8000, + address_unwinds=address_unwind_sequence1), + EncodedFunctionUnwind(page_number=1, + page_offset=0x8000, + address_unwinds=address_unwind_sequence2), + ] + + function_offset_table_offsets = { + address_unwind_sequence0: 0x100, + address_unwind_sequence1: 0x200, + address_unwind_sequence2: 0x300, + } + + page_table, function_table = EncodePageTableAndFunctionTable( + function_unwinds, function_offset_table_offsets) + + self.assertEqual(2 * 4, len(page_table)) + self.assertEqual((0, 2), struct.unpack('2I', page_table)) + + self.assertEqual(6 * 2, len(function_table)) + self.assertEqual((0, 0x100, 0x8000, 0x200, 0x8000, 0x300), + struct.unpack('6H', function_table)) + + def testMultiPageFunction(self): + address_unwind_sequence0 = ( + EncodedAddressUnwind(0x10, bytes([0, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + address_unwind_sequence1 = ( + EncodedAddressUnwind(0x10, bytes([1, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + address_unwind_sequence2 = ( + EncodedAddressUnwind(0x200, bytes([2, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + + function_unwinds = [ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=address_unwind_sequence0), + # Large function. + EncodedFunctionUnwind(page_number=0, + page_offset=0x8000, + address_unwinds=address_unwind_sequence1), + EncodedFunctionUnwind(page_number=4, + page_offset=0x8000, + address_unwinds=address_unwind_sequence2), + ] + + function_offset_table_offsets = { + address_unwind_sequence0: 0x100, + address_unwind_sequence1: 0x200, + address_unwind_sequence2: 0x300, + } + + page_table, function_table = EncodePageTableAndFunctionTable( + function_unwinds, function_offset_table_offsets) + + self.assertEqual(5 * 4, len(page_table)) + self.assertEqual((0, 2, 2, 2, 2), struct.unpack('5I', page_table)) + + self.assertEqual(6 * 2, len(function_table)) + self.assertEqual((0, 0x100, 0x8000, 0x200, 0x8000, 0x300), + struct.unpack('6H', function_table)) + + +class MockReturnParser(UnwindInstructionsParser): + def GetBreakpadInstructionsRegex(self): + return re.compile(r'^RETURN$') + + def ParseFromMatch(self, address_offset, cfa_sp_offset, match): + return AddressUnwind(address_offset, UnwindType.RETURN_TO_LR, 0, ()), 0 + + +class MockEpilogueUnwindParser(UnwindInstructionsParser): + def GetBreakpadInstructionsRegex(self): + return re.compile(r'^EPILOGUE_UNWIND$') + + def ParseFromMatch(self, address_offset, cfa_sp_offset, match): + return AddressUnwind(address_offset, + UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, 0, ()), -100 + + +class MockWildcardParser(UnwindInstructionsParser): + def GetBreakpadInstructionsRegex(self): + return re.compile(r'.*') + + def ParseFromMatch(self, address_offset, cfa_sp_offset, match): + return AddressUnwind(address_offset, + UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, 0, ()), -200 + + +class _TestParseAddressCfi(unittest.TestCase): + def testSuccessParse(self): + address_unwind = AddressUnwind( + address_offset=0x300, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ) + + self.assertEqual((address_unwind, False, 0), + ParseAddressCfi(AddressCfi(address=0x800, + unwind_instructions='RETURN'), + function_start_address=0x500, + parsers=(MockReturnParser(), ), + prev_cfa_sp_offset=0)) + + def testUnhandledAddress(self): + self.assertEqual((None, False, 100), + 
ParseAddressCfi(AddressCfi(address=0x800, + unwind_instructions='UNKNOWN'), + function_start_address=0x500, + parsers=(MockReturnParser(), ), + prev_cfa_sp_offset=100)) + + def testEpilogueUnwind(self): + self.assertEqual( + (None, True, -100), + ParseAddressCfi(AddressCfi(address=0x800, + unwind_instructions='EPILOGUE_UNWIND'), + function_start_address=0x500, + parsers=(MockEpilogueUnwindParser(), ), + prev_cfa_sp_offset=100)) + + def testParsePrecedence(self): + address_unwind = AddressUnwind( + address_offset=0x300, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ) + + self.assertEqual( + (address_unwind, False, 0), + ParseAddressCfi(AddressCfi(address=0x800, unwind_instructions='RETURN'), + function_start_address=0x500, + parsers=(MockReturnParser(), MockWildcardParser()), + prev_cfa_sp_offset=0)) + + +class _TestGenerateUnwinds(unittest.TestCase): + def testSuccessUnwind(self): + self.assertEqual( + [ + FunctionUnwind(address=0x100, + size=1024, + address_unwinds=( + AddressUnwind( + address_offset=0x0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ), + AddressUnwind( + address_offset=0x200, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ), + )) + ], + list( + GenerateUnwinds([ + FunctionCfi( + size=1024, + address_cfi=( + AddressCfi(address=0x100, unwind_instructions='RETURN'), + AddressCfi(address=0x300, unwind_instructions='RETURN'), + )) + ], + parsers=[MockReturnParser()]))) + + def testUnhandledAddress(self): + self.assertEqual( + [ + FunctionUnwind(address=0x100, + size=1024, + address_unwinds=(AddressUnwind( + address_offset=0x0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ), )) + ], + list( + GenerateUnwinds([ + FunctionCfi(size=1024, + address_cfi=( + AddressCfi(address=0x100, + unwind_instructions='RETURN'), + AddressCfi(address=0x300, + unwind_instructions='UNKNOWN'), + )) + ], + parsers=[MockReturnParser()]))) + + def testEpilogueUnwind(self): + self.assertEqual( + [ + FunctionUnwind(address=0x100, + size=1024, + address_unwinds=(AddressUnwind( + address_offset=0x0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ), )) + ], + list( + GenerateUnwinds([ + FunctionCfi( + size=1024, + address_cfi=( + AddressCfi(address=0x100, unwind_instructions='RETURN'), + AddressCfi(address=0x300, + unwind_instructions='EPILOGUE_UNWIND'), + )) + ], + parsers=[ + MockReturnParser(), + MockEpilogueUnwindParser() + ]))) + + def testInvalidInitialUnwindInstructionAsserts(self): + self.assertRaises( + AssertionError, lambda: list( + GenerateUnwinds([ + FunctionCfi(size=1024, + address_cfi=( + AddressCfi(address=0x100, + unwind_instructions='UNKNOWN'), + AddressCfi(address=0x200, + unwind_instructions='RETURN'), + )) + ], + parsers=[MockReturnParser()]))) + + +class _TestEncodeUnwindInfo(unittest.TestCase): + def testEncodeTables(self): + page_table = struct.pack('I', 0) + function_table = struct.pack('4H', 1, 2, 3, 4) + function_offset_table = bytes([1, 2]) + unwind_instruction_table = bytes([1, 2, 3]) + + unwind_info = EncodeUnwindInfo( + page_table, + function_table, + function_offset_table, + unwind_instruction_table, + ) + + self.assertEqual( + 32 + len(page_table) + len(function_table) + + len(function_offset_table) + len(unwind_instruction_table), + len(unwind_info)) + # Header. + self.assertEqual((32, 1, 36, 2, 44, 2, 46, 3), + struct.unpack('8I', unwind_info[:32])) + # Body. 
+ self.assertEqual( + page_table + function_table + function_offset_table + + unwind_instruction_table, unwind_info[32:]) + + def testUnalignedTables(self): + self.assertRaises( + AssertionError, lambda: EncodeUnwindInfo(bytes([1]), b'', b'', b'')) + self.assertRaises( + AssertionError, lambda: EncodeUnwindInfo(b'', bytes([1]), b'', b'')) + + +class _TestGenerateUnwindTables(unittest.TestCase): + def testGenerateUnwindTables(self): + """This is an integration test that hooks everything together. """ + address_unwind_sequence0 = ( + EncodedAddressUnwind(0x20, bytes([0, 0xb0])), + EncodedAddressUnwind(0x0, bytes([0xb0])), + ) + address_unwind_sequence1 = ( + EncodedAddressUnwind(0x20, bytes([1, 0xb0])), + EncodedAddressUnwind(0x0, bytes([0xb0])), + ) + address_unwind_sequence2 = ( + EncodedAddressUnwind(0x200, bytes([2, 0xb0])), + EncodedAddressUnwind(0x0, bytes([0xb0])), + ) + + (page_table, function_table, function_offset_table, + unwind_instruction_table) = GenerateUnwindTables([ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=TRIVIAL_UNWIND), + EncodedFunctionUnwind(page_number=0, + page_offset=0x1000, + address_unwinds=address_unwind_sequence0), + EncodedFunctionUnwind(page_number=1, + page_offset=0x2000, + address_unwinds=address_unwind_sequence1), + EncodedFunctionUnwind(page_number=3, + page_offset=0x1000, + address_unwinds=address_unwind_sequence2), + ]) + + # Complete instruction sequences and their frequencies. + # [0xb0]: 4 + # [0, 0xb0]: 1 + # [1, 0xb0]: 1 + # [2, 0xb0]: 1 + self.assertEqual(bytes([0xb0, 2, 0xb0, 1, 0xb0, 0, 0xb0]), + unwind_instruction_table) + + self.assertEqual( + bytes([ + # Trivial unwind. + 0, + 0, + # Address unwind sequence 0. + 0x10, + 5, + 0, + 0, + # Address unwind sequence 1. + 0x10, + 3, + 0, + 0, + # Address unwind sequence 2. + 0x80, + 2, + 1, + 0, + 0, + ]), + function_offset_table) + + self.assertEqual(8 * 2, len(function_table)) + self.assertEqual((0, 0, 0x1000, 2, 0x2000, 6, 0x1000, 10), + struct.unpack('8H', function_table)) + + self.assertEqual(4 * 4, len(page_table)) + self.assertEqual((0, 2, 3, 3), struct.unpack('4I', page_table)) diff --git a/build/android/gyp/desugar.py b/build/android/gyp/desugar.py deleted file mode 100755 index 87eb1590a5f7..000000000000 --- a/build/android/gyp/desugar.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2017 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import argparse -import os -import sys - -from util import build_utils - - -def main(): - args = build_utils.ExpandFileArgs(sys.argv[1:]) - parser = argparse.ArgumentParser() - build_utils.AddDepfileOption(parser) - parser.add_argument('--desugar-jar', required=True, - help='Path to Desugar.jar.') - parser.add_argument('--input-jar', required=True, - help='Jar input path to include .class files from.') - parser.add_argument('--output-jar', required=True, - help='Jar output path.') - parser.add_argument('--classpath', - action='append', - required=True, - help='Classpath.') - parser.add_argument('--bootclasspath', required=True, - help='Path to javac bootclasspath interface jar.') - parser.add_argument('--warnings-as-errors', - action='store_true', - help='Treat all warnings as errors.') - options = parser.parse_args(args) - - options.bootclasspath = build_utils.ParseGnList(options.bootclasspath) - options.classpath = build_utils.ParseGnList(options.classpath) - - cmd = build_utils.JavaCmd(options.warnings_as_errors) + [ - '-jar', - options.desugar_jar, - '--input', - options.input_jar, - '--output', - options.output_jar, - '--generate_base_classes_for_default_methods', - # Don't include try-with-resources files in every .jar. Instead, they - # are included via //third_party/bazel/desugar:desugar_runtime_java. - '--desugar_try_with_resources_omit_runtime_classes', - ] - for path in options.bootclasspath: - cmd += ['--bootclasspath_entry', path] - for path in options.classpath: - cmd += ['--classpath_entry', path] - build_utils.CheckOutput( - cmd, - print_stdout=False, - stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings, - fail_on_output=options.warnings_as_errors) - - if options.depfile: - build_utils.WriteDepfile(options.depfile, - options.output_jar, - inputs=options.bootclasspath + options.classpath) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/android/gyp/dex.py b/build/android/gyp/dex.py index 9c6fb98e6be2..a7f024a277d5 100755 --- a/build/android/gyp/dex.py +++ b/build/android/gyp/dex.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -10,40 +10,58 @@ import os import re import shutil +import shlex import sys import tempfile import zipfile from util import build_utils from util import md5_check -from util import zipalign +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers -sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.path.pardir)) - -import convert_dex_profile +_DEX_XMX = '2G' # Increase this when __final_dex OOMs. _IGNORE_WARNINGS = ( - # Caused by Play Services: - r'Type `libcore.io.Memory` was not found', - # Caused by a missing final class in flogger: - r'Type `dalvik.system.VMStack` was not found', - # Caused by jacoco code coverage: - r'Type `java.lang.management.ManagementFactory` was not found', - # TODO(wnwen): Remove this after R8 version 3.0.26-dev: - r'Missing class sun.misc.Unsafe', - # Caused when the test apk and the apk under test do not having native libs. + # Warning: Running R8 version main (build engineering), which cannot be + # represented as a semantic version. Using an artificial version newer than + # any known version for selecting Proguard configurations embedded under + # META-INF/. 
This means that all rules with a '-upto-' qualifier will be + # excluded and all rules with a -from- qualifier will be included. + r'Running R8 version main', + # E.g. Triggers for weblayer_instrumentation_test_apk since both it and its + # apk_under_test have no shared_libraries. + # https://crbug.com/1364192 << To fix this in a better way. r'Missing class org.chromium.build.NativeLibraries', - # Caused by internal annotation: https://crbug.com/1180222 - r'Missing class com.google.errorprone.annotations.RestrictedInheritance', # Caused by internal protobuf package: https://crbug.com/1183971 r'referenced from: com.google.protobuf.GeneratedMessageLite$GeneratedExtension', # pylint: disable=line-too-long - # Caused by using Bazel desugar instead of D8 for desugar, since Bazel - # desugar doesn't preserve interfaces in the same way. This should be - # removed when D8 is used for desugaring. - r'Warning: Cannot emulate interface ', - # Only relevant for R8 when optimizing an app that doesn't use proto. + # Desugaring configs may occasionally not match types in our program. This + # may happen temporarily until we move over to the new desugared library + # json flags. See crbug.com/1302088 - this should be removed when this bug + # is fixed. + r'Warning: Specification conversion: The following', + # Caused by protobuf runtime using -identifiernamestring in a way that + # doesn't work with R8. Looks like: + # Rule matches the static final field `...`, which may have been inlined... + # com.google.protobuf.*GeneratedExtensionRegistryLite { + # static java.lang.String CONTAINING_TYPE_*; + # } + r'GeneratedExtensionRegistryLite.CONTAINING_TYPE_', + # Relevant for R8 when optimizing an app that doesn't use protobuf. r'Ignoring -shrinkunusedprotofields since the protobuf-lite runtime is', + # Ignore Unused Rule Warnings in third_party libraries. + r'/third_party/.*Proguard configuration rule does not match anything', + # Ignore Unused Rule Warnings for system classes (aapt2 generates these). + r'Proguard configuration rule does not match anything:.*class android\.', + # TODO(crbug.com/1303951): Don't ignore all such warnings. + r'Proguard configuration rule does not match anything:', + # TODO(agrieve): Remove once we update to U SDK. + r'OnBackAnimationCallback', +) + +_SKIPPED_CLASS_FILE_NAMES = ( + 'module-info.class', # Explicitly skipped by r8/utils/FileUtils#isClassFile ) @@ -51,7 +69,7 @@ def _ParseArgs(args): args = build_utils.ExpandFileArgs(args) parser = argparse.ArgumentParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_argument('--output', required=True, help='Dex output path.') parser.add_argument( '--class-inputs', @@ -96,8 +114,6 @@ def _ParseArgs(args): '--bootclasspath', action='append', help='GN-list of bootclasspath. Needed for --desugar') - parser.add_argument( - '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.') parser.add_argument('--show-desugar-default-interface-warnings', action='store_true', help='Enable desugaring warnings.') @@ -116,6 +132,8 @@ def _ParseArgs(args): parser.add_argument('--force-enable-assertions', action='store_true', help='Forcefully enable javac generated assertion code.') + parser.add_argument('--assertion-handler', + help='The class name of the assertion handler class.') parser.add_argument('--warnings-as-errors', action='store_true', help='Treat all warnings as errors.') @@ -123,47 +141,22 @@ def _ParseArgs(args): action='store_true', help='Use when filing D8 bugs to capture inputs.' 
' Stores inputs to d8inputs.zip') - - group = parser.add_argument_group('Dexlayout') - group.add_argument( - '--dexlayout-profile', - help=('Text profile for dexlayout. If present, a dexlayout ' - 'pass will happen')) - group.add_argument( - '--profman-path', - help=('Path to ART profman binary. There should be a lib/ directory at ' - 'the same path with shared libraries (shared with dexlayout).')) - group.add_argument( - '--dexlayout-path', - help=('Path to ART dexlayout binary. There should be a lib/ directory at ' - 'the same path with shared libraries (shared with dexlayout).')) - group.add_argument('--dexdump-path', help='Path to dexdump binary.') - group.add_argument( - '--proguard-mapping-path', - help=('Path to proguard map from obfuscated symbols in the jar to ' - 'unobfuscated symbols present in the code. If not present, the jar ' - 'is assumed not to be obfuscated.')) - options = parser.parse_args(args) - if options.dexlayout_profile: - build_utils.CheckOptions( - options, - parser, - required=('profman_path', 'dexlayout_path', 'dexdump_path')) - elif options.proguard_mapping_path is not None: - parser.error('Unexpected proguard mapping without dexlayout') - if options.main_dex_rules_path and not options.multi_dex: parser.error('--main-dex-rules-path is unused if multidex is not enabled') - options.class_inputs = build_utils.ParseGnList(options.class_inputs) - options.class_inputs_filearg = build_utils.ParseGnList( + if options.force_enable_assertions and options.assertion_handler: + parser.error('Cannot use both --force-enable-assertions and ' + '--assertion-handler') + + options.class_inputs = action_helpers.parse_gn_list(options.class_inputs) + options.class_inputs_filearg = action_helpers.parse_gn_list( options.class_inputs_filearg) - options.bootclasspath = build_utils.ParseGnList(options.bootclasspath) - options.classpath = build_utils.ParseGnList(options.classpath) - options.dex_inputs = build_utils.ParseGnList(options.dex_inputs) - options.dex_inputs_filearg = build_utils.ParseGnList( + options.bootclasspath = action_helpers.parse_gn_list(options.bootclasspath) + options.classpath = action_helpers.parse_gn_list(options.classpath) + options.dex_inputs = action_helpers.parse_gn_list(options.dex_inputs) + options.dex_inputs_filearg = action_helpers.parse_gn_list( options.dex_inputs_filearg) return options @@ -171,28 +164,27 @@ def _ParseArgs(args): def CreateStderrFilter(show_desugar_default_interface_warnings): def filter_stderr(output): + # Set this when debugging R8 output. + if os.environ.get('R8_SHOW_ALL_OUTPUT', '0') != '0': + return output + + warnings = re.split(r'^(?=Warning|Error)', output, flags=re.MULTILINE) + preamble, *warnings = warnings + patterns = list(_IGNORE_WARNINGS) - # When using Bazel's Desugar tool to desugar lambdas and interface methods, - # we do not provide D8 with a classpath, which causes a lot of warnings from - # D8's default interface desugaring pass. Not having a classpath makes - # incremental dexing much more effective. D8 still does backported method - # desugaring. - # These warnings are also turned off when bytecode checks are turned off. + # Missing deps can happen for prebuilts that are missing transitive deps + # and have set enable_bytecode_checks=false. 
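+ # For example, a warning filtered by the pattern appended below looks
+ # like this (sample taken from the test data in dex_test.py):
+ #   Warning in gen/.../Foo.jar:Bar.class:
+ #     Type `libcore.io.Memory` was not found, it is required for default
+ #     or static interface methods desugaring of `void Bar.a(long, byte)`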
if not show_desugar_default_interface_warnings: patterns += ['default or static interface methods'] combined_pattern = '|'.join(re.escape(p) for p in patterns) - output = build_utils.FilterLines(output, combined_pattern) + preamble = build_utils.FilterLines(preamble, combined_pattern) + + compiled_re = re.compile(combined_pattern, re.DOTALL) + warnings = [w for w in warnings if not compiled_re.search(w)] - # Each warning has a prefix line of the file it's from. If we've filtered - # out the warning, then also filter out the file header. - # E.g.: - # Warning in path/to/Foo.class: - # Error message #1 indented here. - # Error message #2 indented here. - output = re.sub(r'^Warning in .*?:\n(?! )', '', output, flags=re.MULTILINE) - return output + return preamble + ''.join(warnings) return filter_stderr @@ -203,142 +195,35 @@ def _RunD8(dex_cmd, input_paths, output_path, warnings_as_errors, stderr_filter = CreateStderrFilter(show_desugar_default_interface_warnings) - with tempfile.NamedTemporaryFile(mode='w') as flag_file: + is_debug = logging.getLogger().isEnabledFor(logging.DEBUG) + + # Avoid deleting the flag file when DEX_DEBUG is set in case the flag file + # needs to be examined after the build. + with tempfile.NamedTemporaryFile(mode='w', delete=not is_debug) as flag_file: # Chosen arbitrarily. Needed to avoid command-line length limits. MAX_ARGS = 50 + orig_dex_cmd = dex_cmd if len(dex_cmd) > MAX_ARGS: - flag_file.write('\n'.join(dex_cmd[MAX_ARGS:])) - flag_file.flush() - dex_cmd = dex_cmd[:MAX_ARGS] - dex_cmd.append('@' + flag_file.name) + # Add all flags to D8 (anything after the first --) as well as all + # positional args at the end to the flag file. + for idx, cmd in enumerate(dex_cmd): + if cmd.startswith('--'): + flag_file.write('\n'.join(dex_cmd[idx:])) + flag_file.flush() + dex_cmd = dex_cmd[:idx] + dex_cmd.append('@' + flag_file.name) + break # stdout sometimes spams with things like: # Stripped invalid locals information from 1 method. - build_utils.CheckOutput(dex_cmd, - stderr_filter=stderr_filter, - fail_on_output=warnings_as_errors) - - -def _EnvWithArtLibPath(binary_path): - """Return an environment dictionary for ART host shared libraries. - - Args: - binary_path: the path to an ART host binary. - - Returns: - An environment dictionary where LD_LIBRARY_PATH has been augmented with the - shared library path for the binary. This assumes that there is a lib/ - directory in the same location as the binary. - """ - lib_path = os.path.join(os.path.dirname(binary_path), 'lib') - env = os.environ.copy() - libraries = [l for l in env.get('LD_LIBRARY_PATH', '').split(':') if l] - libraries.append(lib_path) - env['LD_LIBRARY_PATH'] = ':'.join(libraries) - return env - - -def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir): - """Create a binary profile for dexlayout. - - Args: - text_profile: The ART text profile that will be converted to a binary - profile. - input_dex: The input dex file to layout. - profman_path: Path to the profman binary. - temp_dir: Directory to work in. - - Returns: - The name of the binary profile, which will live in temp_dir. - """ - binary_profile = os.path.join( - temp_dir, 'binary_profile-for-' + os.path.basename(text_profile)) - open(binary_profile, 'w').close() # Touch binary_profile. 
- profman_cmd = [profman_path, - '--apk=' + input_dex, - '--dex-location=' + input_dex, - '--create-profile-from=' + text_profile, - '--reference-profile-file=' + binary_profile] - build_utils.CheckOutput( - profman_cmd, - env=_EnvWithArtLibPath(profman_path), - stderr_filter=lambda output: - build_utils.FilterLines(output, '|'.join( - [r'Could not find (method_id|proto_id|name):', - r'Could not create type list']))) - return binary_profile - - -def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir): - """Layout a dexfile using a profile. - - Args: - binary_profile: An ART binary profile, eg output from _CreateBinaryProfile. - input_dex: The dex file used to create the binary profile. - dexlayout_path: Path to the dexlayout binary. - temp_dir: Directory to work in. - - Returns: - List of output files produced by dexlayout. This will be one if the input - was a single dexfile, or multiple files if the input was a multidex - zip. These output files are located in temp_dir. - """ - dexlayout_output_dir = os.path.join(temp_dir, 'dexlayout_output') - os.mkdir(dexlayout_output_dir) - dexlayout_cmd = [ dexlayout_path, - '-u', # Update checksum - '-p', binary_profile, - '-w', dexlayout_output_dir, - input_dex ] - build_utils.CheckOutput( - dexlayout_cmd, - env=_EnvWithArtLibPath(dexlayout_path), - stderr_filter=lambda output: - build_utils.FilterLines(output, - r'Can.t mmap dex file.*please zipalign')) - output_files = os.listdir(dexlayout_output_dir) - if not output_files: - raise Exception('dexlayout unexpectedly produced no output') - return sorted([os.path.join(dexlayout_output_dir, f) for f in output_files]) - - -def _ZipMultidex(file_dir, dex_files): - """Zip dex files into a multidex. - - Args: - file_dir: The directory into which to write the output. - dex_files: The dexfiles forming the multizip. Their names must end with - classes.dex, classes2.dex, ... - - Returns: - The name of the multidex file, which will live in file_dir. 
- """ - ordered_files = [] # List of (archive name, file name) - for f in dex_files: - if f.endswith('dex.jar'): - ordered_files.append(('classes.dex', f)) - break - if not ordered_files: - raise Exception('Could not find classes.dex multidex file in %s', - dex_files) - for dex_idx in xrange(2, len(dex_files) + 1): - archive_name = 'classes%d.dex' % dex_idx - for f in dex_files: - if f.endswith(archive_name): - ordered_files.append((archive_name, f)) - break - else: - raise Exception('Could not find classes%d.dex multidex file in %s', - dex_files) - if len(set(f[1] for f in ordered_files)) != len(ordered_files): - raise Exception('Unexpected clashing filenames for multidex in %s', - dex_files) - - zip_name = os.path.join(file_dir, 'multidex_classes.zip') - build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name)) - for archive_name, file_name in ordered_files), - zip_name) - return zip_name + try: + build_utils.CheckOutput(dex_cmd, + stderr_filter=stderr_filter, + fail_on_output=warnings_as_errors) + except Exception: + if orig_dex_cmd is not dex_cmd: + sys.stderr.write('Full command: ' + shlex.join(orig_dex_cmd) + '\n') + raise def _ZipAligned(dex_files, output_path): @@ -351,31 +236,7 @@ def _ZipAligned(dex_files, output_path): with zipfile.ZipFile(output_path, 'w') as z: for i, dex_file in enumerate(dex_files): name = 'classes{}.dex'.format(i + 1 if i > 0 else '') - zipalign.AddToZipHermetic(z, name, src_path=dex_file, alignment=4) - - -def _PerformDexlayout(tmp_dir, tmp_dex_output, options): - if options.proguard_mapping_path is not None: - matching_profile = os.path.join(tmp_dir, 'obfuscated_profile') - convert_dex_profile.ObfuscateProfile( - options.dexlayout_profile, tmp_dex_output, - options.proguard_mapping_path, options.dexdump_path, matching_profile) - else: - logging.warning('No obfuscation for %s', options.dexlayout_profile) - matching_profile = options.dexlayout_profile - binary_profile = _CreateBinaryProfile(matching_profile, tmp_dex_output, - options.profman_path, tmp_dir) - output_files = _LayoutDex(binary_profile, tmp_dex_output, - options.dexlayout_path, tmp_dir) - if len(output_files) > 1: - return _ZipMultidex(tmp_dir, output_files) - - if zipfile.is_zipfile(output_files[0]): - return output_files[0] - - final_output = os.path.join(tmp_dir, 'dex_classes.zip') - _ZipAligned(output_files, final_output) - return final_output + zip_helpers.add_to_zip_hermetic(z, name, src_path=dex_file, alignment=4) def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None): @@ -408,9 +269,6 @@ def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None): _ZipAligned(sorted(d8_inputs), tmp_dex_output) logging.debug('Quick-zipped %d files', len(d8_inputs)) - if options and options.dexlayout_profile: - tmp_dex_output = _PerformDexlayout(tmp_dir, tmp_dex_output, options) - # The dex file is complete and can be moved out of tmp_dir. shutil.move(tmp_dex_output, output) @@ -421,7 +279,7 @@ def _IntermediateDexFilePathsFromInputJars(class_inputs, incremental_dir): for jar in class_inputs: with zipfile.ZipFile(jar, 'r') as z: for subpath in z.namelist(): - if subpath.endswith('.class'): + if _IsClassFile(subpath): subpath = subpath[:-5] + 'dex' dex_files.append(os.path.join(incremental_dir, subpath)) return dex_files @@ -437,12 +295,34 @@ def _DeleteStaleIncrementalDexFiles(dex_dir, dex_files): def _ParseDesugarDeps(desugar_dependencies_file): + # pylint: disable=line-too-long + """Returns a dict of dependent/dependency mapping parsed from the file. 
+ + Example file format: + $ tail out/Debug/gen/base/base_java__dex.desugardeps + org/chromium/base/task/SingleThreadTaskRunnerImpl.class + <- org/chromium/base/task/SingleThreadTaskRunner.class + <- org/chromium/base/task/TaskRunnerImpl.class + org/chromium/base/task/TaskRunnerImpl.class + <- org/chromium/base/task/TaskRunner.class + org/chromium/base/task/TaskRunnerImplJni$1.class + <- obj/base/jni_java.turbine.jar:org/chromium/base/JniStaticTestMocker.class + org/chromium/base/task/TaskRunnerImplJni.class + <- org/chromium/base/task/TaskRunnerImpl$Natives.class + """ + # pylint: enable=line-too-long dependents_from_dependency = collections.defaultdict(set) if desugar_dependencies_file and os.path.exists(desugar_dependencies_file): with open(desugar_dependencies_file, 'r') as f: + dependent = None for line in f: - dependent, dependency = line.rstrip().split(' -> ') - dependents_from_dependency[dependency].add(dependent) + line = line.rstrip() + if line.startswith(' <- '): + dependency = line[len(' <- '):] + # Note that this is a reversed mapping from the one in CustomD8.java. + dependents_from_dependency[dependency].add(dependent) + else: + dependent = line return dependents_from_dependency @@ -463,15 +343,21 @@ def _ComputeRequiredDesugarClasses(changes, desugar_dependencies_file, return required_classes +def _IsClassFile(path): + if os.path.basename(path) in _SKIPPED_CLASS_FILE_NAMES: + return False + return path.endswith('.class') + + def _ExtractClassFiles(changes, tmp_dir, class_inputs, required_classes_set): classes_list = [] for jar in class_inputs: if changes: changed_class_list = (set(changes.IterChangedSubpaths(jar)) | required_classes_set) - predicate = lambda x: x in changed_class_list and x.endswith('.class') + predicate = lambda x: x in changed_class_list and _IsClassFile(x) else: - predicate = lambda x: x.endswith('.class') + predicate = _IsClassFile classes_list.extend( build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate)) @@ -496,14 +382,14 @@ def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd): strings_changed, non_direct_input_changed) changes = None - if changes: + if changes is None: + required_desugar_classes_set = set() + else: required_desugar_classes_set = _ComputeRequiredDesugarClasses( changes, options.desugar_dependencies, options.class_inputs, options.classpath) logging.debug('Class files needing re-desugar: %d', len(required_desugar_classes_set)) - else: - required_desugar_classes_set = set() class_files = _ExtractClassFiles(changes, tmp_extract_dir, options.class_inputs, required_desugar_classes_set) @@ -514,7 +400,13 @@ def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd): # Dex necessary classes into intermediate dex files. dex_cmd = dex_cmd + ['--intermediate', '--file-per-class-file'] if options.desugar_dependencies and not options.skip_custom_d8: - dex_cmd += ['--file-tmp-prefix', tmp_extract_dir] + # Adding os.sep to remove the entire prefix. + dex_cmd += ['--file-tmp-prefix', tmp_extract_dir + os.sep] + if changes is None and os.path.exists(options.desugar_dependencies): + # Since incremental dexing only ever adds to the desugar_dependencies + # file, whenever full dexes are required the .desugardeps files need to + # be manually removed. 
+ os.unlink(options.desugar_dependencies) _RunD8(dex_cmd, class_files, options.incremental_dir, options.warnings_as_errors, options.show_desugar_default_interface_warnings) @@ -539,7 +431,7 @@ def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd): def MergeDexForIncrementalInstall(r8_jar_path, src_paths, dest_dex_jar, min_api): - dex_cmd = build_utils.JavaCmd(verify=False) + [ + dex_cmd = build_utils.JavaCmd(xmx=_DEX_XMX) + [ '-cp', r8_jar_path, 'com.android.tools.r8.D8', @@ -577,7 +469,7 @@ def main(args): final_dex_inputs = list(options.class_inputs) final_dex_inputs += options.dex_inputs - dex_cmd = build_utils.JavaCmd(options.warnings_as_errors) + dex_cmd = build_utils.JavaCmd(xmx=_DEX_XMX) if options.dump_inputs: dex_cmd += ['-Dcom.android.tools.r8.dumpinputtofile=d8inputs.zip'] @@ -624,8 +516,8 @@ def main(args): input_paths += options.bootclasspath - if options.desugar_jdk_libs_json: - dex_cmd += ['--desugared-lib', options.desugar_jdk_libs_json] + if options.assertion_handler: + dex_cmd += ['--force-assertions-handler:' + options.assertion_handler] if options.force_enable_assertions: dex_cmd += ['--force-enable-assertions'] @@ -635,7 +527,7 @@ def main(args): lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd), options, input_paths=input_paths, - input_strings=dex_cmd + [bool(options.incremental_dir)], + input_strings=dex_cmd + [str(bool(options.incremental_dir))], output_paths=output_paths, pass_changes=True, track_subpaths_allowlist=track_subpaths_allowlist, diff --git a/build/android/gyp/dex.pydeps b/build/android/gyp/dex.pydeps index 23856f3c847a..d920e24617e3 100644 --- a/build/android/gyp/dex.pydeps +++ b/build/android/gyp/dex.pydeps @@ -1,10 +1,10 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py +../../action_helpers.py ../../gn_helpers.py ../../print_python_deps.py -../convert_dex_profile.py +../../zip_helpers.py dex.py util/__init__.py util/build_utils.py util/md5_check.py -util/zipalign.py diff --git a/build/android/gyp/dex_jdk_libs.py b/build/android/gyp/dex_jdk_libs.py deleted file mode 100755 index 63047791043d..000000000000 --- a/build/android/gyp/dex_jdk_libs.py +++ /dev/null @@ -1,93 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2020 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import argparse -import os -import shutil -import subprocess -import sys -import zipfile - -from util import build_utils - - -def _ParseArgs(args): - args = build_utils.ExpandFileArgs(args) - parser = argparse.ArgumentParser() - - parser.add_argument('--output', required=True, help='Dex output path.') - parser.add_argument('--r8-path', required=True, help='Path to R8 jar.') - parser.add_argument( - '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.') - parser.add_argument( - '--desugar-jdk-libs-jar', help='Path to desugar_jdk_libs.jar.') - parser.add_argument('--desugar-jdk-libs-configuration-jar', - help='Path to desugar_jdk_libs_configuration.jar.') - parser.add_argument('--min-api', help='minSdkVersion', required=True) - parser.add_argument('--warnings-as-errors', - action='store_true', - help='Treat all warnings as errors.') - options = parser.parse_args(args) - return options - - -def DexJdkLibJar(r8_path, - min_api, - desugar_jdk_libs_json, - desugar_jdk_libs_jar, - desugar_jdk_libs_configuration_jar, - output, - warnings_as_errors, - config_paths=None): - # TODO(agrieve): Spews a lot of stderr about missing classes. - with build_utils.TempDir() as tmp_dir: - cmd = build_utils.JavaCmd(warnings_as_errors) + [ - '-cp', - r8_path, - 'com.android.tools.r8.L8', - '--min-api', - min_api, - '--lib', - build_utils.JAVA_HOME, - '--desugared-lib', - desugar_jdk_libs_json, - ] - - # If no desugaring is required, no keep rules are generated, and the keep - # file will not be created. - if config_paths is not None: - for path in config_paths: - cmd += ['--pg-conf', path] - - cmd += [ - '--output', tmp_dir, desugar_jdk_libs_jar, - desugar_jdk_libs_configuration_jar - ] - - build_utils.CheckOutput(cmd, - print_stdout=True, - fail_on_output=warnings_as_errors) - if os.path.exists(os.path.join(tmp_dir, 'classes2.dex')): - raise Exception('Achievement unlocked: desugar_jdk_libs is multidex!') - - # classes.dex might not exists if the "desugar_jdk_libs_jar" is not used - # at all. - if os.path.exists(os.path.join(tmp_dir, 'classes.dex')): - shutil.move(os.path.join(tmp_dir, 'classes.dex'), output) - return True - return False - - -def main(args): - options = _ParseArgs(args) - DexJdkLibJar(options.r8_path, options.min_api, options.desugar_jdk_libs_json, - options.desugar_jdk_libs_jar, - options.desugar_jdk_libs_configuration_jar, options.output, - options.warnings_as_errors) - - -if __name__ == '__main__': - main(sys.argv[1:]) diff --git a/build/android/gyp/dex_test.py b/build/android/gyp/dex_test.py new file mode 100755 index 000000000000..5042e5fc3799 --- /dev/null +++ b/build/android/gyp/dex_test.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import unittest + +import dex + + +class DexTest(unittest.TestCase): + def testStdErrFilter(self): + # pylint: disable=line-too-long + output = """\ +some initial message +Warning: Specification conversion: The following prefixes do not match any type: [Ljava/util/Desugar] +Warning in ../../clank/third_party/google3/pg_confs/java_com_google_protobuf_lite_proguard.pgcfg: +Rule matches the static final field `java.lang.String com.google.protobuf.BaseGeneratedExtensionRegistryLite.CONTAINING_TYPE_0`, which may have been inlined: -identifiernamestring class com.google.protobuf.*GeneratedExtensionRegistryLite { + static java.lang.String CONTAINING_TYPE_*; +} +Warning: some message +Warning in gen/.../Foo.jar:Bar.class: + Type `libcore.io.Memory` was not found, it is required for default or static interface methods desugaring of `void Bar.a(long, byte)` +Warning: Missing class com.google.android.apps.gsa.search.shared.service.proto.PublicStopClientEvent (referenced from: com.google.protobuf.GeneratedMessageLite$GeneratedExtension com.google.protobuf.BaseGeneratedExtensionRegistryLite.findLiteExtensionByNumber(com.google.protobuf.MessageLite, int)) +Missing class com.google.android.gms.feedback.ApplicationProperties (referenced from: com.google.protobuf.GeneratedMessageLite$GeneratedExtension com.google.protobuf.BaseGeneratedExtensionRegistryLite.findLiteExtensionByNumber(com.google.protobuf.MessageLite, int)) +""" + expected = """\ +some initial message +Warning: some message +""" + # pylint: enable=line-too-long + filter_func = dex.CreateStderrFilter( + show_desugar_default_interface_warnings=False) + self.assertEqual(filter_func(output), expected) + + # Test no preamble, not filtered. + output = """Warning: hi""" + expected = output + self.assertEqual(filter_func(output), expected) + + # Test no preamble, filtered + output = """\ +Warning: Specification conversion: The following prefixes do not ... +""" + expected = '' + self.assertEqual(filter_func(output), expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/android/gyp/dexsplitter.py b/build/android/gyp/dexsplitter.py deleted file mode 100755 index 149e994f5145..000000000000 --- a/build/android/gyp/dexsplitter.py +++ /dev/null @@ -1,132 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import argparse -import os -import shutil -import sys -import zipfile - -from util import build_utils - - -def _ParseOptions(args): - parser = argparse.ArgumentParser() - parser.add_argument('--depfile', help='Path to the depfile to write to.') - parser.add_argument('--stamp', help='Path to stamp to mark when finished.') - parser.add_argument('--r8-path', help='Path to the r8.jar to use.') - parser.add_argument( - '--input-dex-zip', help='Path to dex files in zip being split.') - parser.add_argument( - '--proguard-mapping-file', help='Path to proguard mapping file.') - parser.add_argument( - '--feature-name', - action='append', - dest='feature_names', - help='The name of the feature module.') - parser.add_argument( - '--feature-jars', - action='append', - help='GN list of path to jars which compirse the corresponding feature.') - parser.add_argument( - '--dex-dest', - action='append', - dest='dex_dests', - help='Destination for dex file of the corresponding feature.') - options = parser.parse_args(args) - - assert len(options.feature_names) == len(options.feature_jars) and len( - options.feature_names) == len(options.dex_dests) - options.features = {} - for i, name in enumerate(options.feature_names): - options.features[name] = build_utils.ParseGnList(options.feature_jars[i]) - - return options - - -def _RunDexsplitter(options, output_dir): - cmd = build_utils.JavaCmd() + [ - '-cp', - options.r8_path, - 'com.android.tools.r8.dexsplitter.DexSplitter', - '--output', - output_dir, - '--proguard-map', - options.proguard_mapping_file, - ] - - for base_jar in options.features['base']: - cmd += ['--base-jar', base_jar] - - base_jars_lookup = set(options.features['base']) - for feature in options.features: - if feature == 'base': - continue - for feature_jar in options.features[feature]: - if feature_jar not in base_jars_lookup: - cmd += ['--feature-jar', feature_jar + ':' + feature] - - with build_utils.TempDir() as temp_dir: - unzipped_files = build_utils.ExtractAll(options.input_dex_zip, temp_dir) - for file_name in unzipped_files: - cmd += ['--input', file_name] - build_utils.CheckOutput(cmd) - - -def main(args): - args = build_utils.ExpandFileArgs(args) - options = _ParseOptions(args) - - input_paths = [options.input_dex_zip] - for feature_jars in options.features.itervalues(): - for feature_jar in feature_jars: - input_paths.append(feature_jar) - - with build_utils.TempDir() as dexsplitter_output_dir: - curr_location_to_dest = [] - if len(options.features) == 1: - # Don't run dexsplitter since it needs at least 1 feature module. - curr_location_to_dest.append((options.input_dex_zip, - options.dex_dests[0])) - else: - _RunDexsplitter(options, dexsplitter_output_dir) - - for i, dest in enumerate(options.dex_dests): - module_dex_file = os.path.join(dexsplitter_output_dir, - options.feature_names[i], 'classes.dex') - if os.path.exists(module_dex_file): - curr_location_to_dest.append((module_dex_file, dest)) - else: - module_dex_file += '.jar' - assert os.path.exists( - module_dex_file), 'Dexsplitter tool output not found.' 
- curr_location_to_dest.append((module_dex_file + '.jar', dest)) - - for curr_location, dest in curr_location_to_dest: - with build_utils.AtomicOutput(dest) as f: - if curr_location.endswith('.jar'): - if dest.endswith('.jar'): - shutil.copy(curr_location, f.name) - else: - with zipfile.ZipFile(curr_location, 'r') as z: - namelist = z.namelist() - assert len(namelist) == 1, ( - 'Unzipping to single dex file, but not single dex file in ' + - options.input_dex_zip) - z.extract(namelist[0], f.name) - else: - if dest.endswith('.jar'): - build_utils.ZipDir( - f.name, os.path.abspath(os.path.join(curr_location, os.pardir))) - else: - shutil.move(curr_location, f.name) - - build_utils.Touch(options.stamp) - build_utils.WriteDepfile(options.depfile, options.stamp, inputs=input_paths) - - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/dist_aar.py b/build/android/gyp/dist_aar.py index 7f0de1d687ef..507d0c3d83c9 100755 --- a/build/android/gyp/dist_aar.py +++ b/build/android/gyp/dist_aar.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -16,6 +16,8 @@ import filter_zip from util import build_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers _ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__)) @@ -65,7 +67,7 @@ def _AddResources(aar_zip, resource_zips, include_globs): def main(args): args = build_utils.ExpandFileArgs(args) parser = argparse.ArgumentParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_argument('--output', required=True, help='Path to output aar.') parser.add_argument('--jars', required=True, help='GN list of jar inputs.') parser.add_argument('--dependencies-res-zips', required=True, @@ -98,52 +100,60 @@ def main(args): if options.native_libraries and not options.abi: parser.error('You must provide --abi if you have native libs') - options.jars = build_utils.ParseGnList(options.jars) - options.dependencies_res_zips = build_utils.ParseGnList( + options.jars = action_helpers.parse_gn_list(options.jars) + options.dependencies_res_zips = action_helpers.parse_gn_list( options.dependencies_res_zips) - options.r_text_files = build_utils.ParseGnList(options.r_text_files) - options.proguard_configs = build_utils.ParseGnList(options.proguard_configs) - options.native_libraries = build_utils.ParseGnList(options.native_libraries) - options.jar_excluded_globs = build_utils.ParseGnList( + options.r_text_files = action_helpers.parse_gn_list(options.r_text_files) + options.proguard_configs = action_helpers.parse_gn_list( + options.proguard_configs) + options.native_libraries = action_helpers.parse_gn_list( + options.native_libraries) + options.jar_excluded_globs = action_helpers.parse_gn_list( options.jar_excluded_globs) - options.jar_included_globs = build_utils.ParseGnList( + options.jar_included_globs = action_helpers.parse_gn_list( options.jar_included_globs) - options.resource_included_globs = build_utils.ParseGnList( + options.resource_included_globs = action_helpers.parse_gn_list( options.resource_included_globs) with tempfile.NamedTemporaryFile(delete=False) as staging_file: try: with zipfile.ZipFile(staging_file.name, 'w') as z: - build_utils.AddToZipHermetic( - z, 'AndroidManifest.xml', src_path=options.android_manifest) + zip_helpers.add_to_zip_hermetic(z, + 
'AndroidManifest.xml', + src_path=options.android_manifest) path_transform = filter_zip.CreatePathTransform( - options.jar_excluded_globs, options.jar_included_globs, []) + options.jar_excluded_globs, options.jar_included_globs) with tempfile.NamedTemporaryFile() as jar_file: - build_utils.MergeZips( - jar_file.name, options.jars, path_transform=path_transform) - build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name) - - build_utils.AddToZipHermetic( - z, - 'R.txt', - data=_MergeRTxt(options.r_text_files, - options.resource_included_globs)) - build_utils.AddToZipHermetic(z, 'public.txt', data='') + zip_helpers.merge_zips(jar_file.name, + options.jars, + path_transform=path_transform) + zip_helpers.add_to_zip_hermetic(z, + 'classes.jar', + src_path=jar_file.name) + + zip_helpers.add_to_zip_hermetic(z, + 'R.txt', + data=_MergeRTxt( + options.r_text_files, + options.resource_included_globs)) + zip_helpers.add_to_zip_hermetic(z, 'public.txt', data='') if options.proguard_configs: - build_utils.AddToZipHermetic( - z, 'proguard.txt', - data=_MergeProguardConfigs(options.proguard_configs)) + zip_helpers.add_to_zip_hermetic(z, + 'proguard.txt', + data=_MergeProguardConfigs( + options.proguard_configs)) _AddResources(z, options.dependencies_res_zips, options.resource_included_globs) for native_library in options.native_libraries: libname = os.path.basename(native_library) - build_utils.AddToZipHermetic( - z, os.path.join('jni', options.abi, libname), - src_path=native_library) + zip_helpers.add_to_zip_hermetic(z, + os.path.join('jni', options.abi, + libname), + src_path=native_library) except: os.unlink(staging_file.name) raise @@ -152,7 +162,7 @@ def main(args): if options.depfile: all_inputs = (options.jars + options.dependencies_res_zips + options.r_text_files + options.proguard_configs) - build_utils.WriteDepfile(options.depfile, options.output, all_inputs) + action_helpers.write_depfile(options.depfile, options.output, all_inputs) if __name__ == '__main__': diff --git a/build/android/gyp/dist_aar.pydeps b/build/android/gyp/dist_aar.pydeps index 3182580af76a..ba0dd52590eb 100644 --- a/build/android/gyp/dist_aar.pydeps +++ b/build/android/gyp/dist_aar.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py dist_aar.py filter_zip.py util/__init__.py diff --git a/build/android/gyp/extract_unwind_tables.py b/build/android/gyp/extract_unwind_tables.py index 25c3130e6353..de0f016b5a3b 100755 --- a/build/android/gyp/extract_unwind_tables.py +++ b/build/android/gyp/extract_unwind_tables.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -10,7 +10,7 @@ The output file is a binary file containing CFI rows ordered based on function address. The output file only contains rows that match the most popular rule type in CFI table, to reduce the output size and specify data in compact format. -See doc https://github.com/google/breakpad/blob/master/docs/symbol_files.md. +See doc https://github.com/google/breakpad/blob/main/docs/symbol_files.md. 1. The CFA rules should be of postfix form "SP +". 2. The RA rules should be of postfix form "CFA + ^". Note: breakpad represents dereferencing address with '^' operator. 
@@ -255,12 +255,6 @@ def _WriteCfiData(cfi_data, out_file): _Write2Bytes(out_file, data) -def _ParseCfiData(sym_stream, output_path): - cfi_data = _GetAllCfiRows(sym_stream) - with open(output_path, 'wb') as out_file: - _WriteCfiData(cfi_data, out_file) - - def main(): parser = argparse.ArgumentParser() parser.add_argument( @@ -274,12 +268,16 @@ def main(): help='The path of the dump_syms binary') args = parser.parse_args() - cmd = ['./' + args.dump_syms_path, args.input_path] - proc = subprocess.Popen(cmd, bufsize=-1, stdout=subprocess.PIPE) - _ParseCfiData(proc.stdout, args.output_path) - assert proc.wait() == 0 + cmd = ['./' + args.dump_syms_path, args.input_path, '-v'] + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) + cfi_data = _GetAllCfiRows(proc.stdout) + if proc.wait(): + sys.stderr.write('dump_syms exited with code {} after {} symbols\n'.format( + proc.returncode, len(cfi_data))) + sys.exit(proc.returncode) + with open(args.output_path, 'wb') as out_file: + _WriteCfiData(cfi_data, out_file) - return 0 if __name__ == '__main__': - sys.exit(main()) + main() diff --git a/build/android/gyp/extract_unwind_tables_tests.py b/build/android/gyp/extract_unwind_tables_tests.py index 59436ff2cd37..dd716bf20b3c 100755 --- a/build/android/gyp/extract_unwind_tables_tests.py +++ b/build/android/gyp/extract_unwind_tables_tests.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,6 +9,7 @@ symbol files. """ +import io import optparse import os import struct @@ -24,8 +25,7 @@ class TestExtractUnwindTables(unittest.TestCase): def testExtractCfi(self): - with tempfile.NamedTemporaryFile() as output_file: - test_data_lines = """ + test_data_lines = """ MODULE Linux arm CDE12FE1DF2B37A9C6560B4CBEE056420 lib_chrome.so INFO CODE_ID E12FE1CD2BDFA937C6560B4CBEE05642 FILE 0 ../../base/allocator/allocator_check.cc @@ -63,57 +63,58 @@ def testExtractCfi(self): STACK CFI INIT 3b93214 fffff .cfa: sp 0 + .ra: lr STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^ """.splitlines() - extract_unwind_tables._ParseCfiData( - [l.encode('utf8') for l in test_data_lines], output_file.name) + cfi_data = extract_unwind_tables._GetAllCfiRows( + [l.encode('utf8') for l in test_data_lines]) + out_file = io.BytesIO() + extract_unwind_tables._WriteCfiData(cfi_data, out_file) - expected_cfi_data = { - 0xe1a1e4 : [0x2, 0x11, 0x4, 0x50], - 0xe1a296 : [], - 0xe1a96e : [0x2, 0x4, 0x4, 0xe, 0x6, 0x10], - 0xe1a990 : [], + expected_cfi_data = { + 0xe1a1e4: [0x2, 0x11, 0x4, 0x50], + 0xe1a296: [], + 0xe1a96e: [0x2, 0x4, 0x4, 0xe, 0x6, 0x10], + 0xe1a990: [], 0x3b92e24: [0x28, 0x13], 0x3b92e62: [], - } - expected_function_count = len(expected_cfi_data) + } + expected_function_count = len(expected_cfi_data) - actual_output = [] - with open(output_file.name, 'rb') as f: - while True: - read = f.read(2) - if not read: - break - actual_output.append(struct.unpack('H', read)[0]) + actual_output = [] + out_file.seek(0) + while True: + read = out_file.read(2) + if not read: + break + actual_output.append(struct.unpack('H', read)[0]) - # First value is size of unw_index table. - unw_index_size = actual_output[1] << 16 | actual_output[0] - # |unw_index_size| should match entry count. - self.assertEqual(expected_function_count, unw_index_size) - # |actual_output| is in blocks of 2 bytes. Skip first 4 bytes representing - # size. 
- unw_index_start = 2 - unw_index_addr_end = unw_index_start + expected_function_count * 2 - unw_index_end = unw_index_addr_end + expected_function_count - unw_index_addr_col = actual_output[unw_index_start : unw_index_addr_end] - unw_index_index_col = actual_output[unw_index_addr_end : unw_index_end] + # First value is size of unw_index table. + unw_index_size = actual_output[1] << 16 | actual_output[0] + # |unw_index_size| should match entry count. + self.assertEqual(expected_function_count, unw_index_size) + # |actual_output| is in blocks of 2 bytes. Skip first 4 bytes representing + # size. + unw_index_start = 2 + unw_index_addr_end = unw_index_start + expected_function_count * 2 + unw_index_end = unw_index_addr_end + expected_function_count + unw_index_addr_col = actual_output[unw_index_start:unw_index_addr_end] + unw_index_index_col = actual_output[unw_index_addr_end:unw_index_end] - unw_data_start = unw_index_end - unw_data = actual_output[unw_data_start:] + unw_data_start = unw_index_end + unw_data = actual_output[unw_data_start:] - for func_iter in range(0, expected_function_count): - func_addr = (unw_index_addr_col[func_iter * 2 + 1] << 16 | - unw_index_addr_col[func_iter * 2]) - index = unw_index_index_col[func_iter] - # If index is CANT_UNWIND then invalid function. - if index == 0xFFFF: - self.assertEqual(expected_cfi_data[func_addr], []) - continue + for func_iter in range(0, expected_function_count): + func_addr = (unw_index_addr_col[func_iter * 2 + 1] << 16 + | unw_index_addr_col[func_iter * 2]) + index = unw_index_index_col[func_iter] + # If index is CANT_UNWIND then invalid function. + if index == 0xFFFF: + self.assertEqual(expected_cfi_data[func_addr], []) + continue - func_start = index + 1 - func_end = func_start + unw_data[index] * 2 - self.assertEqual(len(expected_cfi_data[func_addr]), - func_end - func_start) - func_cfi = unw_data[func_start : func_end] - self.assertEqual(expected_cfi_data[func_addr], func_cfi) + func_start = index + 1 + func_end = func_start + unw_data[index] * 2 + self.assertEqual(len(expected_cfi_data[func_addr]), func_end - func_start) + func_cfi = unw_data[func_start:func_end] + self.assertEqual(expected_cfi_data[func_addr], func_cfi) if __name__ == '__main__': diff --git a/build/android/gyp/filter_zip.py b/build/android/gyp/filter_zip.py index 068ff03eb143..0382651bfc4b 100755 --- a/build/android/gyp/filter_zip.py +++ b/build/android/gyp/filter_zip.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,38 +9,25 @@ import sys from util import build_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers -_RESOURCE_CLASSES = [ - "R.class", - "R##*.class", - "Manifest.class", - "Manifest##*.class", -] - - -def CreatePathTransform(exclude_globs, include_globs, - strip_resource_classes_for): +def CreatePathTransform(exclude_globs, include_globs): """Returns a function to strip paths for the given patterns. Args: exclude_globs: List of globs that if matched should be excluded. include_globs: List of globs that if not matched should be excluded. - strip_resource_classes_for: List of Java packages for which to strip - R.java classes from. Returns: * None if no filters are needed. * A function "(path) -> path" that returns None when |path| should be stripped, or |path| otherwise. 
""" - if not (exclude_globs or include_globs or strip_resource_classes_for): + if not (exclude_globs or include_globs): return None exclude_globs = list(exclude_globs or []) - if strip_resource_classes_for: - exclude_globs.extend(p.replace('.', '/') + '/' + f - for p in strip_resource_classes_for - for f in _RESOURCE_CLASSES) def path_transform(path): # Exclude filters take precidence over include filters. if build_utils.MatchesGlob(path, exclude_globs): @@ -62,22 +49,17 @@ def main(): help='GN list of exclude globs') parser.add_argument('--include-globs', help='GN list of include globs') - parser.add_argument('--strip-resource-classes-for', - help='GN list of java package names exclude R.class files in.') - argv = build_utils.ExpandFileArgs(sys.argv[1:]) args = parser.parse_args(argv) - args.exclude_globs = build_utils.ParseGnList(args.exclude_globs) - args.include_globs = build_utils.ParseGnList(args.include_globs) - args.strip_resource_classes_for = build_utils.ParseGnList( - args.strip_resource_classes_for) + args.exclude_globs = action_helpers.parse_gn_list(args.exclude_globs) + args.include_globs = action_helpers.parse_gn_list(args.include_globs) - path_transform = CreatePathTransform(args.exclude_globs, args.include_globs, - args.strip_resource_classes_for) - with build_utils.AtomicOutput(args.output) as f: + path_transform = CreatePathTransform(args.exclude_globs, args.include_globs) + with action_helpers.atomic_output(args.output) as f: if path_transform: - build_utils.MergeZips(f.name, [args.input], path_transform=path_transform) + zip_helpers.merge_zips(f.name, [args.input], + path_transform=path_transform) else: shutil.copy(args.input, f.name) diff --git a/build/android/gyp/filter_zip.pydeps b/build/android/gyp/filter_zip.pydeps index f561e05c45ca..4905fd5d73a1 100644 --- a/build/android/gyp/filter_zip.pydeps +++ b/build/android/gyp/filter_zip.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/filter_zip.pydeps build/android/gyp/filter_zip.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py filter_zip.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/finalize_apk.py b/build/android/gyp/finalize_apk.py index b465f713db56..aaf66c2af862 100644 --- a/build/android/gyp/finalize_apk.py +++ b/build/android/gyp/finalize_apk.py @@ -1,4 +1,4 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Signs and aligns an APK.""" @@ -38,7 +38,7 @@ def FinalizeApk(apksigner_path, else: signer_input_path = unsigned_apk_path - sign_cmd = build_utils.JavaCmd(warnings_as_errors) + [ + sign_cmd = build_utils.JavaCmd() + [ '-jar', apksigner_path, 'sign', diff --git a/build/android/gyp/find.py b/build/android/gyp/find.py index b05874bfb7e1..617efef3ff07 100755 --- a/build/android/gyp/find.py +++ b/build/android/gyp/find.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 # -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Finds files in directories. 
""" -from __future__ import print_function import fnmatch import optparse @@ -27,6 +26,7 @@ def main(argv): for root, _, filenames in os.walk(d): for f in fnmatch.filter(filenames, options.pattern): print(os.path.join(root, f)) + return 0 if __name__ == '__main__': diff --git a/build/android/gyp/flatc_java.py b/build/android/gyp/flatc_java.py new file mode 100755 index 000000000000..003f8201f4be --- /dev/null +++ b/build/android/gyp/flatc_java.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Generate java source files from flatbuffer files. + +This is the action script for the flatbuffer_java_library template. +""" + +import argparse +import sys + +from util import build_utils +import action_helpers +import zip_helpers + + +def main(argv): + parser = argparse.ArgumentParser() + parser.add_argument('--flatc', required=True, help='Path to flatc binary.') + parser.add_argument('--srcjar', required=True, help='Path to output srcjar.') + parser.add_argument( + '--import-dir', + action='append', + default=[], + help='Extra import directory for flatbuffers, can be repeated.') + parser.add_argument('flatbuffers', nargs='+', help='flatbuffer source files') + options = parser.parse_args(argv) + + import_args = [] + for path in options.import_dir: + import_args += ['-I', path] + with build_utils.TempDir() as temp_dir: + build_utils.CheckOutput([options.flatc, '-j', '-o', temp_dir] + + import_args + options.flatbuffers) + + with action_helpers.atomic_output(options.srcjar) as f: + zip_helpers.zip_directory(f, temp_dir) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/dex_jdk_libs.pydeps b/build/android/gyp/flatc_java.pydeps similarity index 53% rename from build/android/gyp/dex_jdk_libs.pydeps rename to build/android/gyp/flatc_java.pydeps index 28d181f528e3..8c0c4f01fc06 100644 --- a/build/android/gyp/dex_jdk_libs.pydeps +++ b/build/android/gyp/flatc_java.pydeps @@ -1,6 +1,8 @@ # Generated by running: -# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex_jdk_libs.pydeps build/android/gyp/dex_jdk_libs.py +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/flatc_java.pydeps build/android/gyp/flatc_java.py +../../action_helpers.py ../../gn_helpers.py -dex_jdk_libs.py +../../zip_helpers.py +flatc_java.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/gcc_preprocess.py b/build/android/gyp/gcc_preprocess.py index 70ae10fc1321..2e5b3b3b5db2 100755 --- a/build/android/gyp/gcc_preprocess.py +++ b/build/android/gyp/gcc_preprocess.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -12,6 +12,8 @@ import zipfile from util import build_utils +import action_helpers # build_utils adds //build to sys.path. 
+import zip_helpers def _ParsePackageName(data): @@ -32,8 +34,8 @@ def main(args): parser.add_argument('templates', nargs='+', help='Template files.') options = parser.parse_args(args) - options.defines = build_utils.ParseGnList(options.defines) - options.include_dirs = build_utils.ParseGnList(options.include_dirs) + options.defines = action_helpers.parse_gn_list(options.defines) + options.include_dirs = action_helpers.parse_gn_list(options.include_dirs) gcc_cmd = [ 'gcc', @@ -46,7 +48,7 @@ def main(args): gcc_cmd.extend('-D' + x for x in options.defines) gcc_cmd.extend('-I' + x for x in options.include_dirs) - with build_utils.AtomicOutput(options.output) as f: + with action_helpers.atomic_output(options.output) as f: with zipfile.ZipFile(f, 'w') as z: for template in options.templates: data = build_utils.CheckOutput(gcc_cmd + [template]) @@ -56,7 +58,7 @@ def main(args): zip_path = posixpath.join( package_name.replace('.', '/'), os.path.splitext(os.path.basename(template))[0]) + '.java' - build_utils.AddToZipHermetic(z, zip_path, data=data) + zip_helpers.add_to_zip_hermetic(z, zip_path, data=data) if __name__ == '__main__': diff --git a/build/android/gyp/gcc_preprocess.pydeps b/build/android/gyp/gcc_preprocess.pydeps index 39e56f70082c..b57d40042751 100644 --- a/build/android/gyp/gcc_preprocess.pydeps +++ b/build/android/gyp/gcc_preprocess.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/gcc_preprocess.pydeps build/android/gyp/gcc_preprocess.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py gcc_preprocess.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/generate_android_wrapper.py b/build/android/gyp/generate_android_wrapper.py index c8b762c75463..46c7afeabed8 100755 --- a/build/android/gyp/generate_android_wrapper.py +++ b/build/android/gyp/generate_android_wrapper.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,6 +8,7 @@ import sys from util import build_utils +import action_helpers # build_utils adds //build to sys.path. sys.path.append( os.path.abspath( @@ -23,7 +24,7 @@ def ExpandWrappedPathLists(args): for arg in args: m = _WRAPPED_PATH_LIST_RE.match(arg) if m: - for p in build_utils.ParseGnList(m.group(2)): + for p in action_helpers.parse_gn_list(m.group(2)): expanded_args.extend([m.group(1), '@WrappedPath(%s)' % p]) else: expanded_args.append(arg) diff --git a/build/android/gyp/generate_linker_version_script.py b/build/android/gyp/generate_linker_version_script.py index 995fcd7b88be..4f34457626f1 100755 --- a/build/android/gyp/generate_linker_version_script.py +++ b/build/android/gyp/generate_linker_version_script.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Generate linker version scripts for Chrome on Android shared libraries.""" @@ -8,6 +8,7 @@ import os from util import build_utils +import action_helpers # build_utils adds //build to sys.path. _SCRIPT_HEADER = """\ # AUTO-GENERATED FILE. DO NOT MODIFY. 
@@ -32,9 +33,12 @@ def main(): required=True, help='Path to output linker version script file.') parser.add_argument( - '--export-java-symbols', + '--jni-multiplexing', action='store_true', - help='Export Java_* JNI methods') + help='Export only the JNI methods generated by multiplexing') + parser.add_argument('--export-fortesting-java-symbols', + action='store_true', + help='Export Java_*_ForTesting JNI methods') parser.add_argument( '--export-symbol-allowlist-file', action='append', @@ -53,8 +57,30 @@ def main(): # for libcrashpad_handler_trampoline.so. symbol_list = ['CrashpadHandlerMain', 'JNI_OnLoad'] - if options.export_java_symbols: + if options.jni_multiplexing: + symbol_list.append('Java_*_resolve_1for_*') + elif options.export_fortesting_java_symbols: symbol_list.append('Java_*') + else: + # The linker uses unix shell globbing patterns, not regex. So, we have to + # include everything that doesn't end in "ForTest(ing)" with this set of + # globs. + symbol_list.append('Java_*[!F]orTesting') + symbol_list.append('Java_*[!o]rTesting') + symbol_list.append('Java_*[!r]Testing') + symbol_list.append('Java_*[!T]esting') + symbol_list.append('Java_*[!e]sting') + symbol_list.append('Java_*[!s]ting') + symbol_list.append('Java_*[!t]ing') + symbol_list.append('Java_*[!i]ng') + symbol_list.append('Java_*[!n]g') + symbol_list.append('Java_*[!F]orTest') + symbol_list.append('Java_*[!o]rTest') + symbol_list.append('Java_*[!r]Test') + symbol_list.append('Java_*[!T]est') + symbol_list.append('Java_*[!e]st') + symbol_list.append('Java_*[!s]t') + symbol_list.append('Java_*[!gt]') if options.export_feature_registrations: symbol_list.append('JNI_OnLoad_*') @@ -74,7 +100,7 @@ def main(): script = ''.join(script_content) - with build_utils.AtomicOutput(options.output, mode='w') as f: + with action_helpers.atomic_output(options.output, mode='w') as f: f.write(script) diff --git a/build/android/gyp/generate_linker_version_script.pydeps b/build/android/gyp/generate_linker_version_script.pydeps index de9fa56a95a6..03ac25d5c058 100644 --- a/build/android/gyp/generate_linker_version_script.pydeps +++ b/build/android/gyp/generate_linker_version_script.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_linker_version_script.pydeps build/android/gyp/generate_linker_version_script.py +../../action_helpers.py ../../gn_helpers.py generate_linker_version_script.py util/__init__.py diff --git a/build/android/gyp/ijar.py b/build/android/gyp/ijar.py index 45413f62fd98..ec12cecaa171 100755 --- a/build/android/gyp/ijar.py +++ b/build/android/gyp/ijar.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -10,6 +10,7 @@ import sys from util import build_utils +import action_helpers # build_utils adds //build to sys.path. # python -c "import zipfile; zipfile.ZipFile('test.jar', 'w')" @@ -20,8 +21,10 @@ def main(): # The point of this wrapper is to use AtomicOutput so that output timestamps # are not updated when outputs are unchanged. - ijar_bin, in_jar, out_jar = sys.argv[1:] - with build_utils.AtomicOutput(out_jar) as f: + if len(sys.argv) != 4: + raise ValueError('unexpected arguments were given. 
%s' % sys.argv)
+ ijar_bin, in_jar, out_jar = sys.argv[1], sys.argv[2], sys.argv[3]
+ with action_helpers.atomic_output(out_jar) as f:
# ijar fails on empty jars: https://github.com/bazelbuild/bazel/issues/10162
if os.path.getsize(in_jar) <= _EMPTY_JAR_SIZE:
with open(in_jar, 'rb') as in_f:
diff --git a/build/android/gyp/ijar.pydeps b/build/android/gyp/ijar.pydeps
index e9ecb6636daf..530aabe8e5d6 100644
--- a/build/android/gyp/ijar.pydeps
+++ b/build/android/gyp/ijar.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/ijar.pydeps build/android/gyp/ijar.py
+../../action_helpers.py
../../gn_helpers.py
ijar.py
util/__init__.py
diff --git a/build/android/gyp/jacoco_instr.py b/build/android/gyp/jacoco_instr.py
index 8e5f29c9cdc7..f32d6e87d235 100755
--- a/build/android/gyp/jacoco_instr.py
+++ b/build/android/gyp/jacoco_instr.py
@@ -1,9 +1,8 @@
#!/usr/bin/env python3
#
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Instruments classes and jar files.

This script corresponds to the 'jacoco_instr' action in the Java build process.
@@ -13,17 +12,20 @@
"""

-from __future__ import print_function
-
import argparse
import json
import os
import shutil
import sys
-import tempfile
import zipfile

from util import build_utils
+import action_helpers
+import zip_helpers
+
+
+# This should be the same as the recipe-side token. See bit.ly/3STSPcE.
+INSTRUMENT_ALL_JACOCO_OVERRIDE_TOKEN = 'INSTRUMENT_ALL_JACOCO'


def _AddArguments(parser):
@@ -49,9 +51,9 @@ def _AddArguments(parser):
help='File to create with the list of source directories '
'and input path.')
parser.add_argument(
- '--java-sources-file',
+ '--target-sources-file',
required=True,
- help='File containing newline-separated .java paths')
+ help='File containing newline-separated .java and .kt paths')
parser.add_argument(
'--jacococli-jar', required=True, help='Path to jacococli.jar.')
parser.add_argument(
@@ -101,10 +103,12 @@ def _CreateSourcesJsonFile(source_dirs, input_path, sources_json_file,
data = {}
data['source_dirs'] = relative_sources
data['input_path'] = []
+ data['output_dir'] = src_root
if input_path:
data['input_path'].append(os.path.abspath(input_path))
with open(sources_json_file, 'w') as f:
json.dump(data, f)
+ return 0


def _GetAffectedClasses(jar_file, source_files):
@@ -133,7 +137,8 @@ def _GetAffectedClasses(jar_file, source_files):
if index == -1:
index = member.find('.class')
for source_file in source_files:
- if source_file.endswith(member[:index] + '.java'):
+ if source_file.endswith(
+ (member[:index] + '.java', member[:index] + '.kt')):
affected_classes.append(member)
is_affected = True
break
@@ -180,7 +185,8 @@ def _InstrumentClassFiles(instrument_cmd,
f.extractall(instrumented_dir, unaffected_members)

# Zip all files to output_path
- build_utils.ZipDir(output_path, instrumented_dir)
+ with action_helpers.atomic_output(output_path) as f:
+ zip_helpers.zip_directory(f, instrumented_dir)


def _RunInstrumentCommand(parser):
@@ -195,8 +201,8 @@ def _RunInstrumentCommand(parser):
args = parser.parse_args()

source_files = []
- if args.java_sources_file:
- source_files.extend(build_utils.ReadSourcesList(args.java_sources_file))
+ if args.target_sources_file:
+ source_files.extend(build_utils.ReadSourcesList(args.target_sources_file))

with build_utils.TempDir() as temp_dir:
instrument_cmd =
build_utils.JavaCmd() + [ @@ -204,23 +210,32 @@ def _RunInstrumentCommand(parser): ] if not args.files_to_instrument: - _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path, - temp_dir) + affected_source_files = None else: affected_files = build_utils.ReadSourcesList(args.files_to_instrument) - source_set = set(source_files) - affected_source_files = [f for f in affected_files if f in source_set] - - # Copy input_path to output_path and return if no source file affected. - if not affected_source_files: - shutil.copyfile(args.input_path, args.output_path) - # Create a dummy sources_json_file. - _CreateSourcesJsonFile([], None, args.sources_json_file, - build_utils.DIR_SOURCE_ROOT) - return 0 + # Check if coverage recipe decided to instrument everything by overriding + # the try builder default setting(selective instrumentation). This can + # happen in cases like a DEPS roll of jacoco library + + # Note: This token is preceded by ../../ because the paths to be + # instrumented are expected to be relative to the build directory. + # See _rebase_paths() at https://bit.ly/40oiixX + token = '../../' + INSTRUMENT_ALL_JACOCO_OVERRIDE_TOKEN + if token in affected_files: + affected_source_files = None else: - _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path, - temp_dir, affected_source_files) + source_set = set(source_files) + affected_source_files = [f for f in affected_files if f in source_set] + + # Copy input_path to output_path and return if no source file affected. + if not affected_source_files: + shutil.copyfile(args.input_path, args.output_path) + # Create a dummy sources_json_file. + _CreateSourcesJsonFile([], None, args.sources_json_file, + build_utils.DIR_SOURCE_ROOT) + return 0 + _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path, + temp_dir, affected_source_files) source_dirs = _GetSourceDirsFromSourceFiles(source_files) # TODO(GYP): In GN, we are passed the list of sources, detecting source diff --git a/build/android/gyp/jacoco_instr.pydeps b/build/android/gyp/jacoco_instr.pydeps index d7fec19fde69..9c763fc6249f 100644 --- a/build/android/gyp/jacoco_instr.pydeps +++ b/build/android/gyp/jacoco_instr.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jacoco_instr.pydeps build/android/gyp/jacoco_instr.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py jacoco_instr.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/java_cpp_enum.py b/build/android/gyp/java_cpp_enum.py index 08a381a96808..9098cfc82b1b 100755 --- a/build/android/gyp/java_cpp_enum.py +++ b/build/android/gyp/java_cpp_enum.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -16,6 +16,9 @@ from util import build_utils from util import java_cpp_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + # List of C++ types that are compatible with the Java code generated by this # script. 
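Note on the generate_linker_version_script.py hunk earlier in this patch: the linker matches symbols with unix shell globs, and [!x] negates only a single character, so "every Java_* symbol except those ending in ForTest/ForTesting" has to be spelled out one suffix position at a time. A minimal sketch of the same logic using Python's fnmatch module (which implements the same [!...] negation syntax); the symbol names are illustrative:

    import fnmatch

    GLOBS = [
        'Java_*[!F]orTesting', 'Java_*[!o]rTesting', 'Java_*[!r]Testing',
        'Java_*[!T]esting', 'Java_*[!e]sting', 'Java_*[!s]ting',
        'Java_*[!t]ing', 'Java_*[!i]ng', 'Java_*[!n]g',
        'Java_*[!F]orTest', 'Java_*[!o]rTest', 'Java_*[!r]Test',
        'Java_*[!T]est', 'Java_*[!e]st', 'Java_*[!s]t', 'Java_*[!gt]',
    ]

    def exported(symbol):
        # A symbol stays exported if any one of the globs matches it.
        return any(fnmatch.fnmatchcase(symbol, g) for g in GLOBS)

    assert exported('Java_Foo_bar')                # regular JNI method: kept
    assert not exported('Java_Foo_barForTesting')  # test-only method: hidden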
@@ -28,7 +31,7 @@ ] -class EnumDefinition(object): +class EnumDefinition: def __init__(self, original_enum_name=None, class_name_override=None, enum_package=None, entries=None, comments=None, fixed_type=None): self.original_enum_name = original_enum_name @@ -79,9 +82,9 @@ def _AssignEntryIndices(self): else: try: self.entries[key] = int(value) - except ValueError: + except ValueError as e: raise Exception('Could not interpret integer from enum value "%s" ' - 'for key %s.' % (value, key)) + 'for key %s.' % (value, key)) from e prev_enum_value = self.entries[key] @@ -96,7 +99,7 @@ def _StripPrefix(self): 'k' + self.original_enum_name] for prefix in prefixes: - if all([w.startswith(prefix) for w in self.entries.keys()]): + if all(w.startswith(prefix) for w in self.entries.keys()): prefix_to_strip = prefix break else: @@ -141,7 +144,7 @@ def _TransformKeys(d, func): return ret -class DirectiveSet(object): +class DirectiveSet: class_name_override_key = 'CLASS_NAME_OVERRIDE' enum_package_key = 'ENUM_PACKAGE' prefix_to_strip_key = 'PREFIX_TO_STRIP' @@ -169,7 +172,7 @@ def UpdateDefinition(self, definition): DirectiveSet.prefix_to_strip_key) -class HeaderParser(object): +class HeaderParser: single_line_comment_re = re.compile(r'\s*//\s*([^\n]*)') multi_line_comment_start_re = re.compile(r'\s*/\*') enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?') @@ -305,7 +308,7 @@ def _ParseRegularLine(self, line): '. Use () for multi-line directives. E.g.\n' + '// GENERATED_JAVA_ENUM_PACKAGE: (\n' + '// foo.package)') - elif generator_directive: + if generator_directive: directive_name = generator_directive.groups()[0] directive_value = generator_directive.groups()[1] self._generator_directives.Update(directive_name, directive_value) @@ -427,10 +430,10 @@ def DoMain(argv): parser.error('Need to specify at least one input file') input_paths = args - with build_utils.AtomicOutput(options.srcjar) as f: + with action_helpers.atomic_output(options.srcjar) as f: with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar: for output_path, data in DoGenerate(input_paths): - build_utils.AddToZipHermetic(srcjar, output_path, data=data) + zip_helpers.add_to_zip_hermetic(srcjar, output_path, data=data) if __name__ == '__main__': diff --git a/build/android/gyp/java_cpp_enum.pydeps b/build/android/gyp/java_cpp_enum.pydeps index e6aaeb7b1f6f..3e63ff861d30 100644 --- a/build/android/gyp/java_cpp_enum.pydeps +++ b/build/android/gyp/java_cpp_enum.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_enum.pydeps build/android/gyp/java_cpp_enum.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py java_cpp_enum.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/java_cpp_enum_tests.py b/build/android/gyp/java_cpp_enum_tests.py index 6d5f150fa0bc..c14f2a085edb 100755 --- a/build/android/gyp/java_cpp_enum_tests.py +++ b/build/android/gyp/java_cpp_enum_tests.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/gyp/java_cpp_features.py b/build/android/gyp/java_cpp_features.py index 8e7c2440d71b..10639a54895f 100755 --- a/build/android/gyp/java_cpp_features.py +++ b/build/android/gyp/java_cpp_features.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2020 The Chromium Authors. 
All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -12,14 +12,16 @@ from util import build_utils from util import java_cpp_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers class FeatureParserDelegate(java_cpp_utils.CppConstantParser.Delegate): - # Ex. 'const base::Feature kConstantName{"StringNameOfTheFeature", ...};' + # Ex. 'BASE_FEATURE(kConstantName, "StringNameOfTheFeature", ...);' # would parse as: # ExtractConstantName() -> 'ConstantName' # ExtractValue() -> '"StringNameOfTheFeature"' - FEATURE_RE = re.compile(r'\s*const (?:base::)?Feature\s+k(\w+)\s*(?:=\s*)?{') + FEATURE_RE = re.compile(r'BASE_FEATURE\(k([^,]+),') VALUE_RE = re.compile(r'\s*("(?:\"|[^"])*")\s*,') def ExtractConstantName(self, line): @@ -100,10 +102,10 @@ def _Main(argv): metavar='INPUTFILE') args = parser.parse_args(argv) - with build_utils.AtomicOutput(args.srcjar) as f: + with action_helpers.atomic_output(args.srcjar) as f: with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar: data, path = _Generate(args.inputs, args.template) - build_utils.AddToZipHermetic(srcjar, path, data=data) + zip_helpers.add_to_zip_hermetic(srcjar, path, data=data) if __name__ == '__main__': diff --git a/build/android/gyp/java_cpp_features.pydeps b/build/android/gyp/java_cpp_features.pydeps index acffae2bb935..4faa9033e29a 100644 --- a/build/android/gyp/java_cpp_features.pydeps +++ b/build/android/gyp/java_cpp_features.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_features.pydeps build/android/gyp/java_cpp_features.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py java_cpp_features.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/java_cpp_features_tests.py b/build/android/gyp/java_cpp_features_tests.py index 5dcdcd8b8c33..3053955ec1ae 100755 --- a/build/android/gyp/java_cpp_features_tests.py +++ b/build/android/gyp/java_cpp_features_tests.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Tests for java_cpp_features.py. @@ -27,14 +27,14 @@ def testParseComments(self): // Comment followed by unrelated code. int foo() { return 3; } -// Real comment. -const base::Feature kSomeFeature{"SomeFeature", - base::FEATURE_DISABLED_BY_DEFAULT}; +// Real comment. base::Feature intentionally split across two lines. +BASE_FEATURE(kSomeFeature, "SomeFeature", + base::FEATURE_DISABLED_BY_DEFAULT); // Real comment that spans // multiple lines. -const base::Feature kSomeOtherFeature{"SomeOtherFeature", - base::FEATURE_ENABLED_BY_DEFAULT}; +BASE_FEATURE(kSomeOtherFeature, "SomeOtherFeature", + base::FEATURE_ENABLED_BY_DEFAULT); // Comment followed by nothing. 
""".split('\n') @@ -52,18 +52,18 @@ def testParseComments(self): def testWhitespace(self): test_data = """ // 1 line -const base::Feature kShort{"Short", base::FEATURE_DISABLED_BY_DEFAULT}; +BASE_FEATURE(kShort, "Short", base::FEATURE_DISABLED_BY_DEFAULT); // 2 lines -const base::Feature kTwoLineFeatureA{"TwoLineFeatureA", - base::FEATURE_DISABLED_BY_DEFAULT}; -const base::Feature kTwoLineFeatureB{ - "TwoLineFeatureB", base::FEATURE_DISABLED_BY_DEFAULT}; +BASE_FEATURE(kTwoLineFeatureA, "TwoLineFeatureA", + base::FEATURE_DISABLED_BY_DEFAULT); +BASE_FEATURE(kTwoLineFeatureB, + "TwoLineFeatureB", base::FEATURE_DISABLED_BY_DEFAULT); // 3 lines -const base::Feature kFeatureWithAVeryLongNameThatWillHaveToWrap{ +BASE_FEATURE(kFeatureWithAVeryLongNameThatWillHaveToWrap, "FeatureWithAVeryLongNameThatWillHaveToWrap", - base::FEATURE_DISABLED_BY_DEFAULT}; + base::FEATURE_DISABLED_BY_DEFAULT); """.split('\n') feature_file_parser = java_cpp_utils.CppConstantParser( java_cpp_features.FeatureParserDelegate(), test_data) @@ -83,64 +83,59 @@ def testWhitespace(self): def testCppSyntax(self): test_data = """ // Mismatched name -const base::Feature kMismatchedFeature{"MismatchedName", - base::FEATURE_DISABLED_BY_DEFAULT}; +BASE_FEATURE(kMismatchedFeature, "MismatchedName", + base::FEATURE_DISABLED_BY_DEFAULT); namespace myfeature { // In a namespace -const base::Feature kSomeFeature{"SomeFeature", - base::FEATURE_DISABLED_BY_DEFAULT}; +BASE_FEATURE(kSomeFeature, "SomeFeature", + base::FEATURE_DISABLED_BY_DEFAULT); } -// Defined with equals sign -const base::Feature kFoo = {"Foo", base::FEATURE_DISABLED_BY_DEFAULT}; - // Build config-specific base::Feature -#if defined(OS_ANDROID) -const base::Feature kAndroidOnlyFeature{"AndroidOnlyFeature", - base::FEATURE_DISABLED_BY_DEFAULT}; +#if BUILDFLAG(IS_ANDROID) +BASE_FEATURE(kAndroidOnlyFeature, "AndroidOnlyFeature", + base::FEATURE_DISABLED_BY_DEFAULT); #endif // Value depends on build config -const base::Feature kMaybeEnabled{"MaybeEnabled", -#if defined(OS_ANDROID) +BASE_FEATURE(kMaybeEnabled, "MaybeEnabled", +#if BUILDFLAG(IS_ANDROID) base::FEATURE_DISABLED_BY_DEFAULT #else base::FEATURE_ENABLED_BY_DEFAULT #endif -}; +); """.split('\n') feature_file_parser = java_cpp_utils.CppConstantParser( java_cpp_features.FeatureParserDelegate(), test_data) features = feature_file_parser.Parse() - self.assertEqual(5, len(features)) + self.assertEqual(4, len(features)) self.assertEqual('MISMATCHED_FEATURE', features[0].name) self.assertEqual('"MismatchedName"', features[0].value) self.assertEqual('SOME_FEATURE', features[1].name) self.assertEqual('"SomeFeature"', features[1].value) - self.assertEqual('FOO', features[2].name) - self.assertEqual('"Foo"', features[2].value) - self.assertEqual('ANDROID_ONLY_FEATURE', features[3].name) - self.assertEqual('"AndroidOnlyFeature"', features[3].value) - self.assertEqual('MAYBE_ENABLED', features[4].name) - self.assertEqual('"MaybeEnabled"', features[4].value) + self.assertEqual('ANDROID_ONLY_FEATURE', features[2].name) + self.assertEqual('"AndroidOnlyFeature"', features[2].value) + self.assertEqual('MAYBE_ENABLED', features[3].name) + self.assertEqual('"MaybeEnabled"', features[3].value) def testNotYetSupported(self): # Negative test for cases we don't yet support, to ensure we don't misparse # these until we intentionally add proper support. 
test_data = """
// Not currently supported: name depends on C++ directive
-const base::Feature kNameDependsOnOs{
-#if defined(OS_ANDROID)
+BASE_FEATURE(kNameDependsOnOs,
+#if BUILDFLAG(IS_ANDROID)
"MaybeName1",
#else
"MaybeName2",
#endif
- base::FEATURE_DISABLED_BY_DEFAULT};
+ base::FEATURE_DISABLED_BY_DEFAULT);
// Not currently supported: feature named with a constant instead of literal
-const base::Feature kNamedAfterConstant{kNamedStringConstant,
- base::FEATURE_DISABLED_BY_DEFAULT};
+BASE_FEATURE(kNamedAfterConstant, kNamedStringConstant,
+ base::FEATURE_DISABLED_BY_DEFAULT);
""".split('\n')
feature_file_parser = java_cpp_utils.CppConstantParser(
java_cpp_features.FeatureParserDelegate(), test_data)
@@ -149,13 +144,13 @@ def testNotYetSupported(self):
def testTreatWebViewLikeOneWord(self):
test_data = """
-const base::Feature kSomeWebViewFeature{"SomeWebViewFeature",
- base::FEATURE_DISABLED_BY_DEFAULT};
-const base::Feature kWebViewOtherFeature{"WebViewOtherFeature",
- base::FEATURE_ENABLED_BY_DEFAULT};
-const base::Feature kFeatureWithPluralWebViews{
+BASE_FEATURE(kSomeWebViewFeature, "SomeWebViewFeature",
+ base::FEATURE_DISABLED_BY_DEFAULT);
+BASE_FEATURE(kWebViewOtherFeature, "WebViewOtherFeature",
+ base::FEATURE_ENABLED_BY_DEFAULT);
+BASE_FEATURE(kFeatureWithPluralWebViews,
"FeatureWithPluralWebViews",
- base::FEATURE_ENABLED_BY_DEFAULT};
+ base::FEATURE_ENABLED_BY_DEFAULT);
""".split('\n')
feature_file_parser = java_cpp_utils.CppConstantParser(
java_cpp_features.FeatureParserDelegate(), test_data)
@@ -169,11 +164,11 @@ def testTreatWebViewLikeOneWord(self):
def testSpecialCharacters(self):
test_data = r"""
-const base::Feature kFeatureWithEscapes{"Weird\tfeature\"name\n",
- base::FEATURE_DISABLED_BY_DEFAULT};
-const base::Feature kFeatureWithEscapes2{
+BASE_FEATURE(kFeatureWithEscapes, "Weird\tfeature\"name\n",
+ base::FEATURE_DISABLED_BY_DEFAULT);
+BASE_FEATURE(kFeatureWithEscapes2,
+ "Weird\tfeature\"name\n",
- base::FEATURE_ENABLED_BY_DEFAULT};
+ base::FEATURE_ENABLED_BY_DEFAULT);
""".split('\n')
feature_file_parser = java_cpp_utils.CppConstantParser(
java_cpp_features.FeatureParserDelegate(), test_data)
@@ -183,16 +178,6 @@ def testSpecialCharacters(self):
self.assertEqual('FEATURE_WITH_ESCAPES2', features[1].name)
self.assertEqual(r'"Weird\tfeature\"name\n"', features[1].value)
- def testNoBaseNamespacePrefix(self):
- test_data = """
-const Feature kSomeFeature{"SomeFeature", FEATURE_DISABLED_BY_DEFAULT};
-""".split('\n')
- feature_file_parser = java_cpp_utils.CppConstantParser(
- java_cpp_features.FeatureParserDelegate(), test_data)
- features = feature_file_parser.Parse()
- self.assertEqual('SOME_FEATURE', features[0].name)
- self.assertEqual('"SomeFeature"', features[0].value)
-
if __name__ == '__main__':
unittest.main()
diff --git a/build/android/gyp/java_cpp_strings.py b/build/android/gyp/java_cpp_strings.py
index d7135997930b..c3d05de6c64d 100755
--- a/build/android/gyp/java_cpp_strings.py
+++ b/build/android/gyp/java_cpp_strings.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
#
-# Copyright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -12,6 +12,8 @@
from util import build_utils
from util import java_cpp_utils
+import action_helpers # build_utils adds //build to sys.path.
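Aside: the swap from build_utils.AtomicOutput/AddToZipHermetic to the new action_helpers and zip_helpers modules recurs in nearly every script in this patch. The resulting srcjar-writing idiom, condensed into a standalone sketch (output path and payload are placeholders):

    import zipfile
    import action_helpers
    import zip_helpers

    with action_helpers.atomic_output('gen/Foo.srcjar') as f:
        with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
            zip_helpers.add_to_zip_hermetic(srcjar, 'org/chromium/Foo.java',
                                            data='// generated source')

atomic_output stages the write in a temporary file and leaves the target file, and therefore its timestamp, untouched when the contents are unchanged, which is what keeps ninja from rebuilding dependents; zip_helpers.zip_directory is the directory-level analogue used by jacoco_instr.py and jinja_template.py.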
+import zip_helpers class StringParserDelegate(java_cpp_utils.CppConstantParser.Delegate): @@ -93,10 +95,10 @@ def _Main(argv): 'inputs', nargs='+', help='Input file(s)', metavar='INPUTFILE') args = parser.parse_args(argv) - with build_utils.AtomicOutput(args.srcjar) as f: + with action_helpers.atomic_output(args.srcjar) as f: with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar: data, path = _Generate(args.inputs, args.template) - build_utils.AddToZipHermetic(srcjar, path, data=data) + zip_helpers.add_to_zip_hermetic(srcjar, path, data=data) if __name__ == '__main__': diff --git a/build/android/gyp/java_cpp_strings.pydeps b/build/android/gyp/java_cpp_strings.pydeps index 0a821f44694d..39b299e4b1da 100644 --- a/build/android/gyp/java_cpp_strings.pydeps +++ b/build/android/gyp/java_cpp_strings.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_strings.pydeps build/android/gyp/java_cpp_strings.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py java_cpp_strings.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/java_cpp_strings_tests.py b/build/android/gyp/java_cpp_strings_tests.py index 4cb1eeeae7a6..793b2c310dee 100755 --- a/build/android/gyp/java_cpp_strings_tests.py +++ b/build/android/gyp/java_cpp_strings_tests.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -130,7 +130,7 @@ def testTreatWebViewLikeOneWord(self): def testTemplateParsing(self): test_data = """ -// Copyright 2019 The Chromium Authors. All rights reserved. +// Copyright 2019 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/gyp/java_google_api_keys.py b/build/android/gyp/java_google_api_keys.py index a58628a78f81..4e4fa1998b18 100755 --- a/build/android/gyp/java_google_api_keys.py +++ b/build/android/gyp/java_google_api_keys.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -13,6 +13,7 @@ import zipfile from util import build_utils +import zip_helpers sys.path.append( os.path.abspath(os.path.join(sys.path[0], '../../../google_apis'))) @@ -29,7 +30,7 @@ def GetScriptName(): def GenerateOutput(constant_definitions): template = string.Template(""" -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -81,7 +82,7 @@ def _DoWriteJarOutput(output_path, constant_definition): with zipfile.ZipFile(output_path, 'w') as srcjar: path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java') data = GenerateOutput(constant_definition) - build_utils.AddToZipHermetic(srcjar, path, data=data) + zip_helpers.add_to_zip_hermetic(srcjar, path, data=data) def _DoMain(argv): @@ -95,14 +96,10 @@ def _DoMain(argv): values = {} values['GOOGLE_API_KEY'] = google_api_keys.GetAPIKey() - values['GOOGLE_API_KEY_PHYSICAL_WEB_TEST'] = (google_api_keys. 
- GetAPIKeyPhysicalWebTest()) + values['GOOGLE_API_KEY_ANDROID_NON_STABLE'] = ( + google_api_keys.GetAPIKeyAndroidNonStable()) values['GOOGLE_CLIENT_ID_MAIN'] = google_api_keys.GetClientID('MAIN') values['GOOGLE_CLIENT_SECRET_MAIN'] = google_api_keys.GetClientSecret('MAIN') - values['GOOGLE_CLIENT_ID_CLOUD_PRINT'] = google_api_keys.GetClientID( - 'CLOUD_PRINT') - values['GOOGLE_CLIENT_SECRET_CLOUD_PRINT'] = google_api_keys.GetClientSecret( - 'CLOUD_PRINT') values['GOOGLE_CLIENT_ID_REMOTING'] = google_api_keys.GetClientID('REMOTING') values['GOOGLE_CLIENT_SECRET_REMOTING'] = google_api_keys.GetClientSecret( 'REMOTING') @@ -110,8 +107,6 @@ def _DoMain(argv): 'REMOTING_HOST') values['GOOGLE_CLIENT_SECRET_REMOTING_HOST'] = (google_api_keys. GetClientSecret('REMOTING_HOST')) - values['GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API'] = (google_api_keys. - GetClientID('REMOTING_IDENTITY_API')) if options.out: _DoWriteJavaOutput(options.out, values) diff --git a/build/android/gyp/java_google_api_keys.pydeps b/build/android/gyp/java_google_api_keys.pydeps index ebb717273f6c..6c027a19d944 100644 --- a/build/android/gyp/java_google_api_keys.pydeps +++ b/build/android/gyp/java_google_api_keys.pydeps @@ -2,6 +2,7 @@ # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_google_api_keys.pydeps build/android/gyp/java_google_api_keys.py ../../../google_apis/google_api_keys.py ../../gn_helpers.py +../../zip_helpers.py java_google_api_keys.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/java_google_api_keys_tests.py b/build/android/gyp/java_google_api_keys_tests.py index e00e86cb743b..0610178d8582 100755 --- a/build/android/gyp/java_google_api_keys_tests.py +++ b/build/android/gyp/java_google_api_keys_tests.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -19,7 +19,7 @@ def testOutput(self): definition = {'E1': 'abc', 'E2': 'defgh'} output = java_google_api_keys.GenerateOutput(definition) expected = """ -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/gyp/javac_output_processor.py b/build/android/gyp/javac_output_processor.py new file mode 100755 index 000000000000..6faf5de5abc9 --- /dev/null +++ b/build/android/gyp/javac_output_processor.py @@ -0,0 +1,216 @@ +#!/usr/bin/env python3 +# +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Contains helper class for processing javac output.""" + +import dataclasses +import os +import pathlib +import re +import sys +import traceback +from typing import List + +from util import build_utils + +sys.path.insert( + 0, + os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')) +import colorama +sys.path.insert( + 0, + os.path.join(build_utils.DIR_SOURCE_ROOT, 'tools', 'android', + 'modularization', 'convenience')) +import lookup_dep + + +def ReplaceGmsPackageIfNeeded(target_name: str) -> str: + if target_name.startswith( + ('//third_party/android_deps:google_play_services_', + '//clank/third_party/google3:google_play_services_')): + return f'$google_play_services_package:{target_name.split(":")[1]}' + return target_name + + +def _DisambiguateDeps(class_entries: List[lookup_dep.ClassEntry]): + def filter_if_not_empty(entries, filter_func): + filtered_entries = [e for e in entries if filter_func(e)] + return filtered_entries or entries + + # When some deps are preferred, ignore all other potential deps. + class_entries = filter_if_not_empty(class_entries, lambda e: e.preferred_dep) + + # E.g. javax_annotation_jsr250_api_java. + class_entries = filter_if_not_empty(class_entries, + lambda e: 'jsr' in e.target) + + # Avoid suggesting subtargets when regular targets exist. + class_entries = filter_if_not_empty(class_entries, + lambda e: '__' not in e.target) + + # Swap out GMS package names if needed. + class_entries = [ + dataclasses.replace(e, target=ReplaceGmsPackageIfNeeded(e.target)) + for e in class_entries + ] + + # Convert to dict and then use list to get the keys back to remove dups and + # keep order the same as before. + class_entries = list({e: True for e in class_entries}) + + return class_entries + + +class JavacOutputProcessor: + def __init__(self, target_name): + self._target_name = self._RemoveSuffixesIfPresent( + ["__compile_java", "__errorprone", "__header"], target_name) + self._suggested_deps = set() + + # Example: ../../ui/android/java/src/org/chromium/ui/base/Clipboard.java:45: + fileline_prefix = ( + r'(?P(?P[-.\w/\\]+.java):(?P[0-9]+):)') + + self._warning_re = re.compile( + fileline_prefix + r'(?P warning: (?P.*))$') + self._error_re = re.compile(fileline_prefix + + r'(?P (?P.*))$') + self._marker_re = re.compile(r'\s*(?P\^)\s*$') + + self._symbol_not_found_re_list = [ + # Example: + # error: package org.chromium.components.url_formatter does not exist + re.compile(fileline_prefix + + r'( error: package [\w.]+ does not exist)$'), + # Example: error: cannot find symbol + re.compile(fileline_prefix + r'( error: cannot find symbol)$'), + # Example: error: symbol not found org.chromium.url.GURL + re.compile(fileline_prefix + r'( error: symbol not found [\w.]+)$'), + ] + + # Example: import org.chromium.url.GURL; + self._import_re = re.compile(r'\s*import (?P[\w\.]+);$') + + self._warning_color = [ + 'full_message', colorama.Fore.YELLOW + colorama.Style.DIM + ] + self._error_color = [ + 'full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT + ] + self._marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT] + + self._class_lookup_index = None + + colorama.init() + + def Process(self, lines): + """ Processes javac output. + + - Applies colors to output. + - Suggests GN dep to add for 'unresolved symbol in Java import' errors. 
+ """ + lines = self._ElaborateLinesForUnknownSymbol(iter(lines)) + for line in lines: + yield self._ApplyColors(line) + if self._suggested_deps: + + def yellow(text): + return colorama.Fore.YELLOW + text + colorama.Fore.RESET + + # Show them in quotes so they can be copy/pasted into BUILD.gn files. + yield yellow('Hint:') + ' One or more errors due to missing GN deps.' + yield (yellow('Hint:') + ' Try adding the following to ' + + yellow(self._target_name)) + for dep in sorted(self._suggested_deps): + yield ' "{}",'.format(dep) + + def _ElaborateLinesForUnknownSymbol(self, lines): + """ Elaborates passed-in javac output for unresolved symbols. + + Looks for unresolved symbols in imports. + Adds: + - Line with GN target which cannot compile. + - Mention of unresolved class if not present in error message. + - Line with suggestion of GN dep to add. + + Args: + lines: Generator with javac input. + Returns: + Generator with processed output. + """ + previous_line = next(lines, None) + line = next(lines, None) + while previous_line != None: + try: + self._LookForUnknownSymbol(previous_line, line) + except Exception: + elaborated_lines = ['Error in _LookForUnknownSymbol ---'] + elaborated_lines += traceback.format_exc().splitlines() + elaborated_lines += ['--- end _LookForUnknownSymbol error'] + for elaborated_line in elaborated_lines: + yield elaborated_line + + yield previous_line + previous_line = line + line = next(lines, None) + + def _ApplyColors(self, line): + """Adds colors to passed-in line and returns processed line.""" + if self._warning_re.match(line): + line = self._Colorize(line, self._warning_re, self._warning_color) + elif self._error_re.match(line): + line = self._Colorize(line, self._error_re, self._error_color) + elif self._marker_re.match(line): + line = self._Colorize(line, self._marker_re, self._marker_color) + return line + + def _LookForUnknownSymbol(self, line, next_line): + if not next_line: + return + + import_re_match = self._import_re.match(next_line) + if not import_re_match: + return + + for regex in self._symbol_not_found_re_list: + if regex.match(line): + break + else: + return + + if self._class_lookup_index is None: + self._class_lookup_index = lookup_dep.ClassLookupIndex( + pathlib.Path(os.getcwd()), + should_build=False, + ) + + class_to_lookup = import_re_match.group('imported_class') + suggested_deps = self._class_lookup_index.match(class_to_lookup) + + if not suggested_deps: + return + + suggested_deps = _DisambiguateDeps(suggested_deps) + suggested_deps_str = ', '.join(s.target for s in suggested_deps) + + if len(suggested_deps) > 1: + suggested_deps_str = 'one of: ' + suggested_deps_str + + self._suggested_deps.add(suggested_deps_str) + + @staticmethod + def _RemoveSuffixesIfPresent(suffixes, text): + for suffix in suffixes: + if text.endswith(suffix): + return text[:-len(suffix)] + return text + + @staticmethod + def _Colorize(line, regex, color): + match = regex.match(line) + start = match.start(color[0]) + end = match.end(color[0]) + return (line[:start] + color[1] + line[start:end] + colorama.Fore.RESET + + colorama.Style.RESET_ALL + line[end:]) diff --git a/build/android/gyp/jetify_jar.py b/build/android/gyp/jetify_jar.py deleted file mode 100755 index e97ad97d99ed..000000000000 --- a/build/android/gyp/jetify_jar.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2019 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -from __future__ import print_function - -import argparse -import os -import subprocess -import sys - -from util import build_utils - - -def _AddArguments(parser): - """Adds arguments related to jetifying to parser. - - Args: - parser: ArgumentParser object. - """ - parser.add_argument( - '--input-path', - required=True, - help='Path to input file(s). Either the classes ' - 'directory, or the path to a jar.') - parser.add_argument( - '--output-path', - required=True, - help='Path to output final file(s) to. Either the ' - 'final classes directory, or the directory in ' - 'which to place the instrumented/copied jar.') - parser.add_argument( - '--jetify-path', required=True, help='Path to jetify bin.') - parser.add_argument( - '--jetify-config-path', required=True, help='Path to jetify config file.') - - -def _RunJetifyCommand(parser): - args = parser.parse_args() - cmd = [ - args.jetify_path, - '-i', - args.input_path, - '-o', - args.output_path, - # Need to suppress a lot of warning output when jar doesn't have - # any references rewritten. - '-l', - 'error' - ] - if args.jetify_config_path: - cmd.extend(['-c', args.jetify_config_path]) - # Must wait for jetify command to complete to prevent race condition. - env = os.environ.copy() - env['JAVA_HOME'] = build_utils.JAVA_HOME - subprocess.check_call(cmd, env=env) - - -def main(): - parser = argparse.ArgumentParser() - _AddArguments(parser) - _RunJetifyCommand(parser) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/android/gyp/jinja_template.py b/build/android/gyp/jinja_template.py index d42189ba38b0..4a242683a6f5 100755 --- a/build/android/gyp/jinja_template.py +++ b/build/android/gyp/jinja_template.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -13,6 +13,8 @@ from util import build_utils from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. 
+import zip_helpers sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir)) from pylib.constants import host_paths @@ -34,7 +36,7 @@ def get_source(self, environment, template): return contents, filename, uptodate -class JinjaProcessor(object): +class JinjaProcessor: """Allows easy rendering of jinja templates with input file tracking.""" def __init__(self, loader_base_dir, variables=None): self.loader_base_dir = loader_base_dir @@ -90,12 +92,13 @@ def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip): path_info.AddMapping(relpath, input_filename) path_info.Write(outputs_zip + '.info') - build_utils.ZipDir(outputs_zip, temp_dir) + with action_helpers.atomic_output(outputs_zip) as f: + zip_helpers.zip_directory(f, temp_dir) def _ParseVariables(variables_arg, error_func): variables = {} - for v in build_utils.ParseGnList(variables_arg): + for v in action_helpers.parse_gn_list(variables_arg): if '=' not in v: error_func('--variables argument must contain "=": ' + v) name, _, value = v.partition('=') @@ -128,8 +131,8 @@ def main(): help='Enable inputs and includes checks.') options = parser.parse_args() - inputs = build_utils.ParseGnList(options.inputs) - includes = build_utils.ParseGnList(options.includes) + inputs = action_helpers.parse_gn_list(options.inputs) + includes = action_helpers.parse_gn_list(options.includes) if (options.output is None) == (options.outputs_zip is None): parser.error('Exactly one of --output and --output-zip must be given') diff --git a/build/android/gyp/jinja_template.pydeps b/build/android/gyp/jinja_template.pydeps index af22c400243f..1eafd884a28a 100644 --- a/build/android/gyp/jinja_template.pydeps +++ b/build/android/gyp/jinja_template.pydeps @@ -10,9 +10,8 @@ ../../../third_party/catapult/devil/devil/constants/__init__.py ../../../third_party/catapult/devil/devil/constants/exit_codes.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/asyncfilters.py -../../../third_party/jinja2/asyncsupport.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py @@ -32,7 +31,9 @@ ../../../third_party/markupsafe/__init__.py ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py ../pylib/__init__.py ../pylib/constants/__init__.py ../pylib/constants/host_paths.py diff --git a/build/android/gyp/lint.py b/build/android/gyp/lint.py index 87d45d651d84..ae26a18085df 100755 --- a/build/android/gyp/lint.py +++ b/build/android/gyp/lint.py @@ -1,27 +1,23 @@ #!/usr/bin/env python3 # -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Runs Android's lint tool.""" -from __future__ import print_function - import argparse -import functools import logging import os -import re import shutil import sys import time -import traceback from xml.dom import minidom from xml.etree import ElementTree from util import build_utils from util import manifest_utils from util import server_utils +import action_helpers # build_utils adds //build to sys.path. 
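Aside: action_helpers.parse_gn_list, used in jinja_template.py above and lint.py below, replaces build_utils.ParseGnList one-for-one. A sketch of the expected behavior, assuming the helper keeps the old semantics:

    import action_helpers

    # GN serializes list arguments as '["a", "b"]'; falsy input yields [].
    assert action_helpers.parse_gn_list('["channel=stable", "foo=bar"]') == [
        'channel=stable', 'foo=bar'
    ]
    assert action_helpers.parse_gn_list(None) == []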
_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/main/build/android/docs/lint.md' # pylint: disable=line-too-long @@ -32,11 +28,13 @@ "InflateParams", # Null is ok when inflating views for dialogs. "InlinedApi", # Constants are copied so they are always available. "LintBaseline", # Don't warn about using baseline.xml files. + "MissingInflatedId", # False positives https://crbug.com/1394222 "MissingApplicationIcon", # False positive for non-production targets. + "ObsoleteLintCustomCheck", # We have no control over custom lint checks. "SwitchIntDef", # Many C++ enums are not used at all in java. + "Typos", # Strings are committed in English first and later translated. "UniqueConstants", # Chromium enums allow aliases. "UnusedAttribute", # Chromium apks have various minSdkVersion values. - "ObsoleteLintCustomCheck", # We have no control over custom lint checks. ] # These checks are not useful for test targets and adds an unnecessary burden @@ -80,7 +78,8 @@ def _GenerateProjectFile(android_manifest, resource_sources=None, custom_lint_jars=None, custom_annotation_zips=None, - android_sdk_version=None): + android_sdk_version=None, + baseline_path=None): project = ElementTree.Element('project') root = ElementTree.SubElement(project, 'root') # Run lint from output directory: crbug.com/1115594 @@ -88,6 +87,9 @@ def _GenerateProjectFile(android_manifest, sdk = ElementTree.SubElement(project, 'sdk') # Lint requires that the sdk path be an absolute path. sdk.set('dir', os.path.abspath(android_sdk_root)) + if baseline_path is not None: + baseline = ElementTree.SubElement(project, 'baseline') + baseline.set('file', baseline_path) cache = ElementTree.SubElement(project, 'cache') cache.set('dir', cache_dir) main_module = ElementTree.SubElement(project, 'module') @@ -163,12 +165,6 @@ def _GenerateAndroidManifest(original_manifest_path, extra_manifest_paths, for node in extra_app_node: app_node.append(node) - if app_node.find( - '{%s}allowBackup' % manifest_utils.ANDROID_NAMESPACE) is None: - # Assume no backup is intended, appeases AllowBackup lint check and keeping - # it working for manifests that do define android:allowBackup. - app_node.set('{%s}allowBackup' % manifest_utils.ANDROID_NAMESPACE, 'false') - uses_sdk = manifest.find('./uses-sdk') if uses_sdk is None: uses_sdk = ElementTree.Element('uses-sdk') @@ -183,7 +179,7 @@ def _GenerateAndroidManifest(original_manifest_path, extra_manifest_paths, def _WriteXmlFile(root, path): logging.info('Writing xml file %s', path) build_utils.MakeDirectory(os.path.dirname(path)) - with build_utils.AtomicOutput(path) as f: + with action_helpers.atomic_output(path) as f: # Although we can write it just with ElementTree.tostring, using minidom # makes it a lot easier to read as a human (also on code search). f.write( @@ -191,7 +187,9 @@ def _WriteXmlFile(root, path): root, encoding='utf-8')).toprettyxml(indent=' ').encode('utf-8')) -def _RunLint(lint_binary_path, +def _RunLint(create_cache, + custom_lint_jar_path, + lint_jar_path, backported_methods_path, config_path, manifest_path, @@ -212,21 +210,46 @@ def _RunLint(lint_binary_path, warnings_as_errors=False): logging.info('Lint starting') - cmd = [ - lint_binary_path, + if create_cache: + # Occasionally lint may crash due to re-using intermediate files from older + # lint runs. See https://crbug.com/1258178 for context. 
+ logging.info('Clearing cache dir %s before creating cache.', cache_dir) + shutil.rmtree(cache_dir, ignore_errors=True) + os.makedirs(cache_dir) + + if baseline and not os.path.exists(baseline): + # Generating new baselines is only done locally, and requires more memory to + # avoid OOMs. + creating_baseline = True + lint_xmx = '4G' + else: + creating_baseline = False + lint_xmx = '2G' + + # All paths in lint are based off of relative paths from root with root as the + # prefix. Path variable substitution is based off of prefix matching so custom + # path variables need to match exactly in order to show up in baseline files. + # e.g. lint_path=path/to/output/dir/../../file/in/src + root_path = os.getcwd() # This is usually the output directory. + pathvar_src = os.path.join( + root_path, os.path.relpath(build_utils.DIR_SOURCE_ROOT, start=root_path)) + + cmd = build_utils.JavaCmd(xmx=lint_xmx) + [ + '-cp', + '{}:{}'.format(lint_jar_path, custom_lint_jar_path), + 'org.chromium.build.CustomLint', + '--sdk-home', + android_sdk_root, + '--jdk-home', + build_utils.JAVA_HOME, + '--path-variables', + f'SRC={pathvar_src}', '--quiet', # Silences lint's "." progress updates. + '--stacktrace', # Prints full stacktraces for internal lint errors. '--disable', ','.join(_DISABLED_ALWAYS), ] - # Crashes lint itself, see b/187524311 - # Only disable if we depend on androidx.fragment (otherwise lint fails due to - # non-existent check). - if any('androidx_fragment_fragment' in aar for aar in aars): - cmd.extend(['--disable', 'DialogFragmentCallbacksDetector']) - - if baseline: - cmd.extend(['--baseline', baseline]) if testonly_target: cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)]) @@ -302,27 +325,12 @@ def _RunLint(lint_binary_path, classpath, srcjar_sources, resource_sources, custom_lint_jars, custom_annotation_zips, - android_sdk_version) + android_sdk_version, baseline) project_xml_path = os.path.join(lint_gen_dir, 'project.xml') _WriteXmlFile(project_file_root, project_xml_path) cmd += ['--project', project_xml_path] - logging.info('Preparing environment variables') - env = os.environ.copy() - # It is important that lint uses the checked-in JDK11 as it is almost 50% - # faster than JDK8. - env['JAVA_HOME'] = build_utils.JAVA_HOME - # This is necessary so that lint errors print stack traces in stdout. - env['LINT_PRINT_STACKTRACE'] = 'true' - if baseline and not os.path.exists(baseline): - # Generating new baselines is only done locally, and requires more memory to - # avoid OOMs. - env['LINT_OPTS'] = '-Xmx4g' - else: - # The default set in the wrapper script is 1g, but it seems not enough :( - env['LINT_OPTS'] = '-Xmx2g' - # This filter is necessary for JDK11. stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings stdout_filter = lambda x: build_utils.FilterLines(x, 'No issues found') @@ -330,14 +338,21 @@ def _RunLint(lint_binary_path, start = time.time() logging.debug('Lint command %s', ' '.join(cmd)) failed = True + + if creating_baseline and not warnings_as_errors: + # Allow error code 6 when creating a baseline: ERRNO_CREATED_BASELINE + fail_func = lambda returncode, _: returncode not in (0, 6) + else: + fail_func = lambda returncode, _: returncode != 0 + try: failed = bool( build_utils.CheckOutput(cmd, - env=env, print_stdout=True, stdout_filter=stdout_filter, stderr_filter=stderr_filter, - fail_on_output=warnings_as_errors)) + fail_on_output=warnings_as_errors, + fail_func=fail_func)) finally: # When not treating warnings as errors, display the extra footer. 
is_debug = os.environ.get('LINT_DEBUG', '0') != '0' @@ -363,14 +378,20 @@ def _RunLint(lint_binary_path, def _ParseArgs(argv): parser = argparse.ArgumentParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_argument('--target-name', help='Fully qualified GN target name.') parser.add_argument('--skip-build-server', action='store_true', help='Avoid using the build server.') - parser.add_argument('--lint-binary-path', + parser.add_argument('--use-build-server', + action='store_true', + help='Always use the build server.') + parser.add_argument('--lint-jar-path', required=True, - help='Path to lint executable.') + help='Path to the lint jar.') + parser.add_argument('--custom-lint-jar-path', + required=True, + help='Path to our custom lint jar.') parser.add_argument('--backported-methods', help='Path to backported methods file created by R8.') parser.add_argument('--cache-dir', @@ -402,8 +423,9 @@ def _ParseArgs(argv): parser.add_argument('--warnings-as-errors', action='store_true', help='Treat all warnings as errors.') - parser.add_argument('--java-sources', - help='File containing a list of java sources files.') + parser.add_argument('--sources', + help='A list of files containing java and kotlin source ' + 'files.') parser.add_argument('--aars', help='GN list of included aars.') parser.add_argument('--srcjars', help='GN list of included srcjars.') parser.add_argument('--manifest-path', @@ -429,13 +451,21 @@ def _ParseArgs(argv): 'on new errors.') args = parser.parse_args(build_utils.ExpandFileArgs(argv)) - args.java_sources = build_utils.ParseGnList(args.java_sources) - args.aars = build_utils.ParseGnList(args.aars) - args.srcjars = build_utils.ParseGnList(args.srcjars) - args.resource_sources = build_utils.ParseGnList(args.resource_sources) - args.extra_manifest_paths = build_utils.ParseGnList(args.extra_manifest_paths) - args.resource_zips = build_utils.ParseGnList(args.resource_zips) - args.classpath = build_utils.ParseGnList(args.classpath) + args.sources = action_helpers.parse_gn_list(args.sources) + args.aars = action_helpers.parse_gn_list(args.aars) + args.srcjars = action_helpers.parse_gn_list(args.srcjars) + args.resource_sources = action_helpers.parse_gn_list(args.resource_sources) + args.extra_manifest_paths = action_helpers.parse_gn_list( + args.extra_manifest_paths) + args.resource_zips = action_helpers.parse_gn_list(args.resource_zips) + args.classpath = action_helpers.parse_gn_list(args.classpath) + + if args.baseline: + assert os.path.basename(args.baseline) == 'lint-baseline.xml', ( + 'The baseline file needs to be named "lint-baseline.xml" in order for ' + 'the autoroller to find and update it whenever lint is rolled to a new ' + 'version.') + return args @@ -448,13 +478,15 @@ def main(): # Avoid parallelizing cache creation since lint runs without the cache defeat # the purpose of creating the cache in the first place. 
if (not args.create_cache and not args.skip_build_server - and server_utils.MaybeRunCommand( - name=args.target_name, argv=sys.argv, stamp_file=args.stamp)): + and server_utils.MaybeRunCommand(name=args.target_name, + argv=sys.argv, + stamp_file=args.stamp, + force=args.use_build_server)): return sources = [] - for java_sources_file in args.java_sources: - sources.extend(build_utils.ReadSourcesList(java_sources_file)) + for sources_file in args.sources: + sources.extend(build_utils.ReadSourcesList(sources_file)) resource_sources = [] for resource_sources_file in args.resource_sources: resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file)) @@ -466,7 +498,9 @@ def main(): ]) depfile_deps = [p for p in possible_depfile_deps if p] - _RunLint(args.lint_binary_path, + _RunLint(args.create_cache, + args.custom_lint_jar_path, + args.lint_jar_path, args.backported_methods, args.config_path, args.manifest_path, @@ -489,7 +523,7 @@ def main(): build_utils.Touch(args.stamp) if args.depfile: - build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps) + action_helpers.write_depfile(args.depfile, args.stamp, depfile_deps) if __name__ == '__main__': diff --git a/build/android/gyp/lint.pydeps b/build/android/gyp/lint.pydeps index 0994e19a4ae8..84bafde4795d 100644 --- a/build/android/gyp/lint.pydeps +++ b/build/android/gyp/lint.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py +../../action_helpers.py ../../gn_helpers.py lint.py util/__init__.py diff --git a/build/android/gyp/merge_manifest.py b/build/android/gyp/merge_manifest.py index 9d77aa5fe6be..a9c2535ae851 100755 --- a/build/android/gyp/merge_manifest.py +++ b/build/android/gyp/merge_manifest.py @@ -1,12 +1,13 @@ #!/usr/bin/env python3 -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Merges dependency Android manifests into a root manifest.""" import argparse +import collections import contextlib import os import sys @@ -15,57 +16,62 @@ from util import build_utils from util import manifest_utils +import action_helpers # build_utils adds //build to sys.path. _MANIFEST_MERGER_MAIN_CLASS = 'com.android.manifmerger.Merger' -_MANIFEST_MERGER_JARS = [ - os.path.join('build-system', 'manifest-merger.jar'), - os.path.join('common', 'common.jar'), - os.path.join('sdk-common', 'sdk-common.jar'), - os.path.join('sdklib', 'sdklib.jar'), - os.path.join('external', 'com', 'google', 'guava', 'guava', '28.1-jre', - 'guava-28.1-jre.jar'), - os.path.join('external', 'kotlin-plugin-ij', 'Kotlin', 'kotlinc', 'lib', - 'kotlin-stdlib.jar'), - os.path.join('external', 'com', 'google', 'code', 'gson', 'gson', '2.8.5', - 'gson-2.8.5.jar'), -] @contextlib.contextmanager -def _ProcessManifest(manifest_path, min_sdk_version, target_sdk_version, - max_sdk_version, manifest_package): - """Patches an Android manifest's package and performs assertions to ensure - correctness for the manifest. 
- """ +def _ProcessMainManifest(manifest_path, min_sdk_version, target_sdk_version, + max_sdk_version, manifest_package): + """Patches the main Android manifest""" doc, manifest, _ = manifest_utils.ParseManifest(manifest_path) - manifest_utils.AssertUsesSdk(manifest, min_sdk_version, target_sdk_version, - max_sdk_version) + manifest_utils.SetUsesSdk(manifest, target_sdk_version, min_sdk_version, + max_sdk_version) assert manifest_utils.GetPackage(manifest) or manifest_package, \ 'Must set manifest package in GN or in AndroidManifest.xml' - manifest_utils.AssertPackage(manifest, manifest_package) if manifest_package: manifest.set('package', manifest_package) - tmp_prefix = os.path.basename(manifest_path) + tmp_prefix = manifest_path.replace(os.path.sep, '-') with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest: manifest_utils.SaveManifest(doc, patched_manifest.name) yield patched_manifest.name, manifest_utils.GetPackage(manifest) -def _BuildManifestMergerClasspath(android_sdk_cmdline_tools): - return ':'.join([ - os.path.join(android_sdk_cmdline_tools, 'lib', jar) - for jar in _MANIFEST_MERGER_JARS - ]) +@contextlib.contextmanager +def _ProcessOtherManifest(manifest_path, target_sdk_version, + seen_package_names): + """Patches non-main AndroidManifest.xml if necessary.""" + # 1. Ensure targetSdkVersion is set to the expected value to avoid + # spurious permissions being added (b/222331337). + # 2. Ensure all manifests have a unique package name so that the merger + # does not fail when this happens. + doc, manifest, _ = manifest_utils.ParseManifest(manifest_path) + + changed_api = manifest_utils.SetTargetApiIfUnset(manifest, target_sdk_version) + + package_name = manifest_utils.GetPackage(manifest) + package_count = seen_package_names[package_name] + seen_package_names[package_name] += 1 + if package_count > 0: + manifest.set('package', f'{package_name}_{package_count}') + + if package_count > 0 or changed_api: + tmp_prefix = manifest_path.replace(os.path.sep, '-') + with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest: + manifest_utils.SaveManifest(doc, patched_manifest.name) + yield patched_manifest.name + else: + yield manifest_path def main(argv): argv = build_utils.ExpandFileArgs(argv) parser = argparse.ArgumentParser(description=__doc__) - build_utils.AddDepfileOption(parser) - parser.add_argument( - '--android-sdk-cmdline-tools', - help='Path to SDK\'s cmdline-tools folder.', - required=True) + action_helpers.add_depfile_arg(parser) + parser.add_argument('--manifest-merger-jar', + help='Path to SDK\'s manifest merger jar.', + required=True) parser.add_argument('--root-manifest', help='Root manifest which to merge into', required=True) @@ -90,12 +96,10 @@ def main(argv): help='Treat all warnings as errors.') args = parser.parse_args(argv) - classpath = _BuildManifestMergerClasspath(args.android_sdk_cmdline_tools) - - with build_utils.AtomicOutput(args.output) as output: - cmd = build_utils.JavaCmd(args.warnings_as_errors) + [ + with action_helpers.atomic_output(args.output) as output: + cmd = build_utils.JavaCmd() + [ '-cp', - classpath, + args.manifest_merger_jar, _MANIFEST_MERGER_MAIN_CLASS, '--out', output.name, @@ -111,14 +115,21 @@ def main(argv): 'MAX_SDK_VERSION=' + args.max_sdk_version, ] - extras = build_utils.ParseGnList(args.extras) - if extras: - cmd += ['--libs', ':'.join(extras)] - - with _ProcessManifest(args.root_manifest, args.min_sdk_version, - args.target_sdk_version, args.max_sdk_version, - args.manifest_package) as tup: - 
root_manifest, package = tup + extras = action_helpers.parse_gn_list(args.extras) + + with contextlib.ExitStack() as stack: + root_manifest, package = stack.enter_context( + _ProcessMainManifest(args.root_manifest, args.min_sdk_version, + args.target_sdk_version, args.max_sdk_version, + args.manifest_package)) + if extras: + seen_package_names = collections.Counter() + extras_processed = [ + stack.enter_context( + _ProcessOtherManifest(e, args.target_sdk_version, + seen_package_names)) for e in extras + ] + cmd += ['--libs', ':'.join(extras_processed)] cmd += [ '--main', root_manifest, @@ -134,15 +145,8 @@ def main(argv): IsTimeStale(output.name, [root_manifest] + extras), fail_on_output=args.warnings_as_errors) - # Check for correct output. - _, manifest, _ = manifest_utils.ParseManifest(output.name) - manifest_utils.AssertUsesSdk(manifest, args.min_sdk_version, - args.target_sdk_version) - manifest_utils.AssertPackage(manifest, package) - if args.depfile: - inputs = extras + classpath.split(':') - build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs) + action_helpers.write_depfile(args.depfile, args.output, inputs=extras) if __name__ == '__main__': diff --git a/build/android/gyp/merge_manifest.pydeps b/build/android/gyp/merge_manifest.pydeps index ef9bb340479c..003690ff3029 100644 --- a/build/android/gyp/merge_manifest.pydeps +++ b/build/android/gyp/merge_manifest.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/merge_manifest.pydeps build/android/gyp/merge_manifest.py +../../action_helpers.py ../../gn_helpers.py merge_manifest.py util/__init__.py diff --git a/build/android/gyp/native_libraries_template.py b/build/android/gyp/native_libraries_template.py deleted file mode 100644 index cf336ecf4924..000000000000 --- a/build/android/gyp/native_libraries_template.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -NATIVE_LIBRARIES_TEMPLATE = """\ -// This file is autogenerated by -// build/android/gyp/write_native_libraries_java.py -// Please do not change its content. - -package org.chromium.build; - -public class NativeLibraries {{ - public static final int CPU_FAMILY_UNKNOWN = 0; - public static final int CPU_FAMILY_ARM = 1; - public static final int CPU_FAMILY_MIPS = 2; - public static final int CPU_FAMILY_X86 = 3; - - // Set to true to enable the use of the Chromium Linker. - public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER}; - public static {MAYBE_FINAL}boolean sUseLibraryInZipFile{USE_LIBRARY_IN_ZIP_FILE}; - public static {MAYBE_FINAL}boolean sUseModernLinker{USE_MODERN_LINKER}; - - // This is the list of native libraries to be loaded (in the correct order) - // by LibraryLoader.java. - // TODO(cjhopman): This is public since it is referenced by NativeTestActivity.java - // directly. The two ways of library loading should be refactored into one. - public static {MAYBE_FINAL}String[] LIBRARIES = {{{LIBRARIES}}}; - - // This is the expected version of the 'main' native library, which is the one that - // implements the initial set of base JNI functions including - // base::android::nativeGetVersionName() - // TODO(torne): This is public to work around classloader issues in Trichrome - // where NativeLibraries is not in the same dex as LibraryLoader. - // We should instead split up Java code along package boundaries. 
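Aside on the merge_manifest.py change above: _ProcessOtherManifest() uniquifies duplicate package names with a collections.Counter so the merger no longer fails when two library manifests declare the same package. The renaming scheme in isolation:

    import collections

    seen_package_names = collections.Counter()
    renamed = []
    for name in ['com.foo', 'com.foo', 'com.bar']:
        count = seen_package_names[name]
        seen_package_names[name] += 1
        renamed.append(f'{name}_{count}' if count else name)
    assert renamed == ['com.foo', 'com.foo_1', 'com.bar']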
- public static {MAYBE_FINAL}String sVersionNumber = {VERSION_NUMBER}; - - public static {MAYBE_FINAL}int sCpuFamily = {CPU_FAMILY}; -}} -""" diff --git a/build/android/gyp/nocompile_test.py b/build/android/gyp/nocompile_test.py index a5739f17d726..c3b02d2c961f 100755 --- a/build/android/gyp/nocompile_test.py +++ b/build/android/gyp/nocompile_test.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Checks that compiling targets in BUILD.gn file fails.""" @@ -13,10 +13,13 @@ from util import build_utils _CHROMIUM_SRC = os.path.normpath(os.path.join(__file__, '..', '..', '..', '..')) -_NINJA_PATH = os.path.join(_CHROMIUM_SRC, 'third_party', 'depot_tools', 'ninja') +_NINJA_PATH = os.path.join(_CHROMIUM_SRC, 'third_party', 'ninja', 'ninja') # Relative to _CHROMIUM_SRC -_GN_SRC_REL_PATH = os.path.join('third_party', 'depot_tools', 'gn') +_GN_SRC_REL_PATH = os.path.join('buildtools', 'linux64', 'gn') + +# Regex for determining whether compile failed because 'gn gen' needs to be run. +_GN_GEN_REGEX = re.compile(r'ninja: (error|fatal):') def _raise_command_exception(args, returncode, output): @@ -47,17 +50,22 @@ def _run_command(args, cwd=None): _raise_command_exception(args, p.returncode, pout) -def _run_command_get_output(args, success_output): - """Runs shell command and returns command output.""" +def _run_command_get_failure_output(args): + """Runs shell command. + + Returns: + Command output if command fails, None if command succeeds. + """ p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) pout, _ = p.communicate() + if p.returncode == 0: - return success_output + return None # For Python3 only: if isinstance(pout, bytes) and sys.version_info >= (3, ): pout = pout.decode('utf-8') - return pout + return '' if pout is None else pout def _copy_and_append_gn_args(src_args_path, dest_args_path, extra_args): @@ -74,21 +82,57 @@ def _copy_and_append_gn_args(src_args_path, dest_args_path, extra_args): f_out.write('\n'.join(extra_args)) -def _find_lines_after_prefix(text, prefix, num_lines): - """Searches |text| for a line which starts with |prefix|. +def _find_regex_in_test_failure_output(test_output, regex): + """Searches for regex in test output. - Args: - text: String to search in. - prefix: Prefix to search for. - num_lines: Number of lines, starting with line with prefix, to return. - Returns: - Matched lines. Returns None otherwise. + Args: + test_output: test output. + regex: regular expression to search for. + Returns: + Whether the regular expression was found in the part of the test output + after the 'FAILED' message. + + If the regex does not contain '\n': + the first 5 lines after the 'FAILED' message (including the text on the + line after the 'FAILED' message) is searched. + Otherwise: + the entire test output after the 'FAILED' message is searched. 
""" - lines = text.split('\n') - for i, line in enumerate(lines): - if line.startswith(prefix): - return lines[i:i + num_lines] - return None + if test_output is None: + return False + + failed_index = test_output.find('FAILED') + if failed_index < 0: + return False + + failure_message = test_output[failed_index:] + if regex.find('\n') >= 0: + return re.search(regex, failure_message) + + return _search_regex_in_list(failure_message.split('\n')[:5], regex) + + +def _search_regex_in_list(value, regex): + for line in value: + if re.search(regex, line): + return True + return False + + +def _do_build_get_failure_output(gn_path, gn_cmd, options): + # Extract directory from test target. As all of the test targets are declared + # in the same BUILD.gn file, it does not matter which test target is used. + target_dir = gn_path.rsplit(':', 1)[0] + + if gn_cmd is not None: + gn_args = [ + _GN_SRC_REL_PATH, '--root-target=' + target_dir, gn_cmd, + os.path.relpath(options.out_dir, _CHROMIUM_SRC) + ] + _run_command(gn_args, cwd=_CHROMIUM_SRC) + + ninja_args = [_NINJA_PATH, '-C', options.out_dir, gn_path] + return _run_command_get_failure_output(ninja_args) def main(): @@ -106,7 +150,10 @@ def main(): options = parser.parse_args() with open(options.test_configs_path) as f: - test_configs = json.loads(f.read()) + # Escape '\' in '\.' now. This avoids having to do the escaping in the test + # specification. + config_text = f.read().replace(r'\.', r'\\.') + test_configs = json.loads(config_text) if not os.path.exists(options.out_dir): os.makedirs(options.out_dir) @@ -121,34 +168,34 @@ def main(): _copy_and_append_gn_args(options.gn_args_path, out_gn_args_path, extra_gn_args) - # As all of the test targets are declared in the same BUILD.gn file, it does - # not matter which test target is used as the root target. - gn_args = [ - _GN_SRC_REL_PATH, '--root-target=' + test_configs[0]['target'], 'gen', - os.path.relpath(options.out_dir, _CHROMIUM_SRC) - ] - _run_command(gn_args, cwd=_CHROMIUM_SRC) - + ran_gn_gen = False + did_clean_build = False error_messages = [] for config in test_configs: # Strip leading '//' gn_path = config['target'][2:] expect_regex = config['expect_regex'] - ninja_args = [_NINJA_PATH, '-C', options.out_dir, gn_path] - - # Purpose of quotes at beginning of message is to make it clear that - # "Compile successful." is not a compiler log message. - test_output = _run_command_get_output(ninja_args, '""\nCompile successful.') - - failure_message_lines = _find_lines_after_prefix(test_output, 'FAILED:', 5) - found_expect_regex = False - if failure_message_lines: - for line in failure_message_lines: - if re.search(expect_regex, line): - found_expect_regex = True - break - if not found_expect_regex: + test_output = _do_build_get_failure_output(gn_path, None, options) + + # 'gn gen' takes > 1s to run. Only run 'gn gen' if it is needed for compile. + if (test_output + and _search_regex_in_list(test_output.split('\n'), _GN_GEN_REGEX)): + assert not ran_gn_gen + ran_gn_gen = True + test_output = _do_build_get_failure_output(gn_path, 'gen', options) + + if (not _find_regex_in_test_failure_output(test_output, expect_regex) + and not did_clean_build): + # Ensure the failure is not due to incremental build. + did_clean_build = True + test_output = _do_build_get_failure_output(gn_path, 'clean', options) + + if not _find_regex_in_test_failure_output(test_output, expect_regex): + if test_output is None: + # Purpose of quotes at beginning of message is to make it clear that + # "Compile successful." 
is not a compiler log message.
+        test_output = '""\nCompile successful.'
       error_message = '//{} failed.\nExpected compile output pattern:\n'\
           '{}\nActual compile output:\n{}'.format(
               gn_path, expect_regex, test_output)
diff --git a/build/android/gyp/optimize_resources.py b/build/android/gyp/optimize_resources.py
new file mode 100755
index 000000000000..f1be4ccf1a9c
--- /dev/null
+++ b/build/android/gyp/optimize_resources.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python3
+#
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import logging
+import os
+import sys
+
+from util import build_utils
+import action_helpers  # build_utils adds //build to sys.path.
+
+
+def _ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    An options object as from argparse.ArgumentParser.parse_args()
+  """
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--aapt2-path',
+                      required=True,
+                      help='Path to the Android aapt2 tool.')
+  parser.add_argument(
+      '--short-resource-paths',
+      action='store_true',
+      help='Whether to shorten resource paths inside the apk or module.')
+  parser.add_argument(
+      '--strip-resource-names',
+      action='store_true',
+      help='Whether to strip resource names from the resource table of the apk '
+      'or module.')
+  parser.add_argument('--proto-path',
+                      required=True,
+                      help='Input proto format resources APK.')
+  parser.add_argument('--resources-config-paths',
+                      default='[]',
+                      help='GN list of paths to aapt2 resources config files.')
+  parser.add_argument('--r-text-in',
+                      required=True,
+                      help='Path to R.txt. Used to exclude id/ resources.')
+  parser.add_argument(
+      '--resources-path-map-out-path',
+      help='Path to file produced by aapt2 that maps original resource paths '
+      'to shortened resource paths inside the apk or module.')
+  parser.add_argument('--optimized-proto-path',
+                      required=True,
+                      help='Output for `aapt2 optimize`.')
+  options = parser.parse_args(args)
+
+  options.resources_config_paths = action_helpers.parse_gn_list(
+      options.resources_config_paths)
+
+  if options.resources_path_map_out_path and not options.short_resource_paths:
+    parser.error(
+        '--resources-path-map-out-path requires --short-resource-paths')
+  return options
+
+
+def _CombineResourceConfigs(resources_config_paths, out_config_path):
+  with open(out_config_path, 'w') as out_config:
+    for config_path in resources_config_paths:
+      with open(config_path) as config:
+        out_config.write(config.read())
+        out_config.write('\n')
+
+
+def _ExtractNonCollapsableResources(rtxt_path):
+  """Extract resources that should not be collapsed from the R.txt file.
+
+  Resources of type ID are references to UI elements/views. They are used by
+  UI automation testing frameworks. They are kept in so that they don't break
+  tests, even though they may not actually be used during runtime. See
+  https://crbug.com/900993
+  App icons (aka mipmaps) are sometimes referenced by other apps by name so must
+  be kept as well.
See https://b/161564466
+
+  Args:
+    rtxt_path: Path to R.txt file with all the resources
+  Returns:
+    List of resources in the form of <resource_type>/<resource_name>
+  """
+  resources = []
+  _NO_COLLAPSE_TYPES = ['id', 'mipmap']
+  with open(rtxt_path) as rtxt:
+    for line in rtxt:
+      for resource_type in _NO_COLLAPSE_TYPES:
+        if ' {} '.format(resource_type) in line:
+          resource_name = line.split()[2]
+          resources.append('{}/{}'.format(resource_type, resource_name))
+  return resources
+
+
+def _OptimizeApk(output, options, temp_dir, unoptimized_path, r_txt_path):
+  """Optimize intermediate .ap_ file with aapt2.
+
+  Args:
+    output: Path to write to.
+    options: The command-line options.
+    temp_dir: A temporary directory.
+    unoptimized_path: path of the apk to optimize.
+    r_txt_path: path to the R.txt file of the unoptimized apk.
+  """
+  optimize_command = [
+      options.aapt2_path,
+      'optimize',
+      unoptimized_path,
+      '-o',
+      output,
+  ]
+
+  # Optimize the resources.pb file by obfuscating resource names, allowing
+  # usage only via R.java constants.
+  if options.strip_resource_names:
+    no_collapse_resources = _ExtractNonCollapsableResources(r_txt_path)
+    gen_config_path = os.path.join(temp_dir, 'aapt2.config')
+    if options.resources_config_paths:
+      _CombineResourceConfigs(options.resources_config_paths, gen_config_path)
+    with open(gen_config_path, 'a') as config:
+      for resource in no_collapse_resources:
+        config.write('{}#no_collapse\n'.format(resource))
+
+    optimize_command += [
+        '--collapse-resource-names',
+        '--resources-config-path',
+        gen_config_path,
+    ]
+
+  if options.short_resource_paths:
+    optimize_command += ['--shorten-resource-paths']
+    if options.resources_path_map_out_path:
+      optimize_command += [
+          '--resource-path-shortening-map', options.resources_path_map_out_path
+      ]
+
+  logging.debug('Running aapt2 optimize')
+  build_utils.CheckOutput(optimize_command,
+                          print_stdout=False,
+                          print_stderr=False)
+
+
+def main(args):
+  options = _ParseArgs(args)
+  with build_utils.TempDir() as temp_dir:
+    _OptimizeApk(options.optimized_proto_path, options, temp_dir,
+                 options.proto_path, options.r_text_in)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/build/android/gyp/dexsplitter.pydeps b/build/android/gyp/optimize_resources.pydeps
similarity index 52%
rename from build/android/gyp/dexsplitter.pydeps
rename to build/android/gyp/optimize_resources.pydeps
index cefc5722d5db..be3e8e7135d4 100644
--- a/build/android/gyp/dexsplitter.pydeps
+++ b/build/android/gyp/optimize_resources.pydeps
@@ -1,6 +1,7 @@
 # Generated by running:
-#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dexsplitter.pydeps build/android/gyp/dexsplitter.py
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/optimize_resources.pydeps build/android/gyp/optimize_resources.py
+../../action_helpers.py
 ../../gn_helpers.py
-dexsplitter.py
+optimize_resources.py
 util/__init__.py
 util/build_utils.py
diff --git a/build/android/gyp/prepare_resources.py b/build/android/gyp/prepare_resources.py
index 93fe9f91d6e5..e86711c3b765 100755
--- a/build/android/gyp/prepare_resources.py
+++ b/build/android/gyp/prepare_resources.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 #
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Copyright 2012 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
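For reference, the R.txt parsed by _ExtractNonCollapsableResources() in
optimize_resources.py above is a flat list of "int <type> <name> <value>"
entries; a minimal sketch (names invented):

    int id toolbar_view 0x7f0b0001
    int mipmap app_icon 0x7f030000
    int string app_name 0x7f120002

Only the 'id' and 'mipmap' lines match the ' {type} ' substring test and are
emitted as 'id/toolbar_view' and 'mipmap/app_icon'; everything else remains
eligible for --collapse-resource-names.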
@@ -18,6 +18,8 @@ from util import md5_check from util import resources_parser from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers def _ParseArgs(args): @@ -27,7 +29,7 @@ def _ParseArgs(args): An options object as from argparse.ArgumentParser.parse_args() """ parser = argparse.ArgumentParser(description=__doc__) - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_argument('--res-sources-path', required=True, @@ -38,6 +40,12 @@ def _ParseArgs(args): help='Path to pre-existing R.txt. Its resource IDs override those found ' 'in the generated R.txt when generating R.java.') + parser.add_argument( + '--allow-missing-resources', + action='store_true', + help='Do not fail if some resources exist in the res/ dir but are not ' + 'listed in the sources.') + parser.add_argument( '--resource-zip-out', help='Path to a zip archive containing all resources from ' @@ -110,7 +118,7 @@ def _ZipResources(resource_dirs, zip_path, ignore_pattern): # the contents of possibly multiple res/ dirs each within an encapsulating # directory within the zip. z.comment = resource_utils.MULTIPLE_RES_MAGIC_STRING - build_utils.DoZip(files_to_zip, z) + zip_helpers.add_files_to_zip(files_to_zip, z) def _GenerateRTxt(options, r_txt_path): @@ -130,7 +138,7 @@ def _GenerateRTxt(options, r_txt_path): def _OnStaleMd5(options): with resource_utils.BuildContext() as build: - if options.sources: + if options.sources and not options.allow_missing_resources: _CheckAllFilesListed(options.sources, options.resource_dirs) if options.r_text_in: r_txt_path = options.r_text_in diff --git a/build/android/gyp/prepare_resources.pydeps b/build/android/gyp/prepare_resources.pydeps index b225918c4dc9..5c7c4410a237 100644 --- a/build/android/gyp/prepare_resources.pydeps +++ b/build/android/gyp/prepare_resources.pydeps @@ -1,9 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/asyncfilters.py -../../../third_party/jinja2/asyncsupport.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py @@ -23,8 +22,10 @@ ../../../third_party/markupsafe/__init__.py ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py +../../action_helpers.py ../../gn_helpers.py ../../print_python_deps.py +../../zip_helpers.py prepare_resources.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/process_native_prebuilt.py b/build/android/gyp/process_native_prebuilt.py index 52645d9b1629..060adae81400 100755 --- a/build/android/gyp/process_native_prebuilt.py +++ b/build/android/gyp/process_native_prebuilt.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -10,6 +10,7 @@ import sys from util import build_utils +import action_helpers # build_utils adds //build to sys.path. def main(args): @@ -23,7 +24,7 @@ def main(args): # eu-strip's output keeps mode from source file which might not be writable # thus it fails to override its output on the next run. 
AtomicOutput fixes # the issue. - with build_utils.AtomicOutput(options.stripped_output_path) as out: + with action_helpers.atomic_output(options.stripped_output_path) as out: cmd = [ options.strip_path, options.input_path, diff --git a/build/android/gyp/process_native_prebuilt.pydeps b/build/android/gyp/process_native_prebuilt.pydeps index 8e2012acebbf..baf9eff7a28d 100644 --- a/build/android/gyp/process_native_prebuilt.pydeps +++ b/build/android/gyp/process_native_prebuilt.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/process_native_prebuilt.pydeps build/android/gyp/process_native_prebuilt.py +../../action_helpers.py ../../gn_helpers.py process_native_prebuilt.py util/__init__.py diff --git a/build/android/gyp/proguard.py b/build/android/gyp/proguard.py index 6444f6b207de..579501c7ded3 100755 --- a/build/android/gyp/proguard.py +++ b/build/android/gyp/proguard.py @@ -1,57 +1,43 @@ #!/usr/bin/env python3 # -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import argparse -from collections import defaultdict import logging import os +import pathlib import re import shutil import sys -import tempfile import zipfile import dex -import dex_jdk_libs -from pylib.dex import dex_parser from util import build_utils from util import diff_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers -_API_LEVEL_VERSION_CODE = [ - (21, 'L'), - (22, 'LollipopMR1'), - (23, 'M'), - (24, 'N'), - (25, 'NMR1'), - (26, 'O'), - (27, 'OMR1'), - (28, 'P'), - (29, 'Q'), - (30, 'R'), - (31, 'S'), +_BLOCKLISTED_EXPECTATION_PATHS = [ + # A separate expectation file is created for these files. 
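+    # (Entries are matched by simple substring containment; see the
+    # `any(entry in config ...)` check in _CombineConfigs below.)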
+ 'clank/third_party/google3/pg_confs/', ] +_DUMP_DIR_NAME = 'r8inputs_dir' + def _ParseOptions(): args = build_utils.ExpandFileArgs(sys.argv[1:]) parser = argparse.ArgumentParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_argument('--r8-path', required=True, help='Path to the R8.jar to use.') - parser.add_argument( - '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.') parser.add_argument('--input-paths', action='append', required=True, help='GN-list of .jar files to optimize.') - parser.add_argument('--desugar-jdk-libs-jar', - help='Path to desugar_jdk_libs.jar.') - parser.add_argument('--desugar-jdk-libs-configuration-jar', - help='Path to desugar_jdk_libs_configuration.jar.') parser.add_argument('--output-path', help='Path to the generated .jar file.') parser.add_argument( '--proguard-configs', @@ -83,19 +69,19 @@ def _ParseOptions(): '--verbose', '-v', action='store_true', help='Print all ProGuard output') parser.add_argument( '--repackage-classes', help='Package all optimized classes are put in.') - parser.add_argument( - '--disable-outlining', - action='store_true', - help='Disable the outlining optimization provided by R8.') parser.add_argument( '--disable-checks', action='store_true', help='Disable -checkdiscard directives and missing symbols check') - parser.add_argument('--sourcefile', help='Value for source file attribute') + parser.add_argument('--source-file', help='Value for source file attribute.') + parser.add_argument('--package-name', + help='Goes into a comment in the mapping file.') parser.add_argument( '--force-enable-assertions', action='store_true', help='Forcefully enable javac generated assertion code.') + parser.add_argument('--assertion-handler', + help='The class name of the assertion handler class.') parser.add_argument( '--feature-jars', action='append', @@ -134,6 +120,10 @@ def _ParseOptions(): action='store_true', help='Use when filing R8 bugs to capture inputs.' ' Stores inputs to r8inputs.zip') + parser.add_argument( + '--dump-unknown-refs', + action='store_true', + help='Log all reasons why API modelling cannot determine API level') parser.add_argument( '--stamp', help='File to touch upon success. 
Mutually exclusive with --output-path')
@@ -153,13 +143,18 @@ def _ParseOptions():
 
   if bool(options.keep_rules_targets_regex) != bool(
       options.keep_rules_output_path):
-    raise Exception('You must path both --keep-rules-targets-regex and '
-                    '--keep-rules-output-path')
-
-  options.classpath = build_utils.ParseGnList(options.classpath)
-  options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
-  options.input_paths = build_utils.ParseGnList(options.input_paths)
-  options.extra_mapping_output_paths = build_utils.ParseGnList(
+    parser.error('You must pass both --keep-rules-targets-regex and '
+                 '--keep-rules-output-path')
+
+  if options.force_enable_assertions and options.assertion_handler:
+    parser.error('Cannot use both --force-enable-assertions and '
+                 '--assertion-handler')
+
+  options.classpath = action_helpers.parse_gn_list(options.classpath)
+  options.proguard_configs = action_helpers.parse_gn_list(
+      options.proguard_configs)
+  options.input_paths = action_helpers.parse_gn_list(options.input_paths)
+  options.extra_mapping_output_paths = action_helpers.parse_gn_list(
       options.extra_mapping_output_paths)
 
   if options.feature_names:
@@ -170,7 +165,7 @@ def _ParseOptions():
       parser.error('Invalid feature argument lengths.')
 
     options.feature_jars = [
-        build_utils.ParseGnList(x) for x in options.feature_jars
+        action_helpers.parse_gn_list(x) for x in options.feature_jars
     ]
 
   split_map = {}
@@ -186,7 +181,7 @@ def _ParseOptions():
   return options
 
 
-class _SplitContext(object):
+class _SplitContext:
   def __init__(self, name, output_path, input_jars, work_dir, parent_name=None):
     self.name = name
     self.parent_name = parent_name
@@ -195,18 +190,12 @@ def __init__(self, name, output_path, input_jars, work_dir, parent_name=None):
     self.staging_dir = os.path.join(work_dir, name)
     os.mkdir(self.staging_dir)
 
-  def CreateOutput(self, has_imported_lib=False, keep_rule_output=None):
+  def CreateOutput(self):
     found_files = build_utils.FindInDirectory(self.staging_dir)
     if not found_files:
       raise Exception('Missing dex outputs in {}'.format(self.staging_dir))
 
     if self.final_output_path.endswith('.dex'):
-      if has_imported_lib:
-        raise Exception(
-            'Trying to create a single .dex file, but a dependency requires '
-            'JDK Library Desugaring (which necessitates a second file).'
-            'Refer to %s to see what desugaring was required' %
-            keep_rule_output)
       if len(found_files) != 1:
         raise Exception('Expected exactly 1 dex file output, found: {}'.format(
             '\t'.join(found_files)))
@@ -216,52 +205,12 @@
     # Add to .jar using Python rather than having R8 output to a .zip directly
     # in order to disable compression of the .jar, saving ~500ms.
     tmp_jar_output = self.staging_dir + '.jar'
-    build_utils.DoZip(found_files, tmp_jar_output, base_dir=self.staging_dir)
+    zip_helpers.add_files_to_zip(found_files,
+                                 tmp_jar_output,
+                                 base_dir=self.staging_dir)
     shutil.move(tmp_jar_output, self.final_output_path)
 
 
-def _DeDupeInputJars(split_contexts_by_name):
-  """Moves jars used by multiple splits into common ancestors.
-
-  Updates |input_jars| for each _SplitContext.
-  """
-
-  def count_ancestors(split_context):
-    ret = 0
-    if split_context.parent_name:
-      ret += 1
-      ret += count_ancestors(split_contexts_by_name[split_context.parent_name])
-    return ret
-
-  base_context = split_contexts_by_name['base']
-  # Sort by tree depth so that ensure children are visited before their parents.
- sorted_contexts = list(split_contexts_by_name.values()) - sorted_contexts.remove(base_context) - sorted_contexts.sort(key=count_ancestors, reverse=True) - - # If a jar is present in multiple siblings, promote it to their parent. - seen_jars_by_parent = defaultdict(set) - for split_context in sorted_contexts: - seen_jars = seen_jars_by_parent[split_context.parent_name] - new_dupes = seen_jars.intersection(split_context.input_jars) - parent_context = split_contexts_by_name[split_context.parent_name] - parent_context.input_jars.update(new_dupes) - seen_jars.update(split_context.input_jars) - - def ancestor_jars(parent_name, dest=None): - dest = dest or set() - if not parent_name: - return dest - parent_context = split_contexts_by_name[parent_name] - dest.update(parent_context.input_jars) - return ancestor_jars(parent_context.parent_name, dest) - - # Now that jars have been moved up the tree, remove those that appear in - # ancestors. - for split_context in sorted_contexts: - split_context.input_jars -= ancestor_jars(split_context.parent_name) - - def _OptimizeWithR8(options, config_paths, libraries, @@ -304,19 +253,27 @@ def _OptimizeWithR8(options, base_context = split_contexts_by_name['base'] # R8 OOMs with the default xmx=1G. - cmd = build_utils.JavaCmd(options.warnings_as_errors, xmx='2G') + [ - '-Dcom.android.tools.r8.allowTestProguardOptions=1', - '-Dcom.android.tools.r8.disableHorizontalClassMerging=1', + cmd = build_utils.JavaCmd(xmx='2G') + [ + # Allows -whyareyounotinlining, which we don't have by default, but + # which is useful for one-off queries. + '-Dcom.android.tools.r8.experimental.enablewhyareyounotinlining=1', + # Restricts horizontal class merging to apply only to classes that + # share a .java file (nested classes). https://crbug.com/1363709 + '-Dcom.android.tools.r8.enableSameFilePolicy=1', ] - if options.disable_outlining: - cmd += ['-Dcom.android.tools.r8.disableOutlining=1'] if options.dump_inputs: - cmd += ['-Dcom.android.tools.r8.dumpinputtofile=r8inputs.zip'] + cmd += [f'-Dcom.android.tools.r8.dumpinputtodirectory={_DUMP_DIR_NAME}'] + if options.dump_unknown_refs: + cmd += ['-Dcom.android.tools.r8.reportUnknownApiReferences=1'] cmd += [ '-cp', options.r8_path, 'com.android.tools.r8.R8', '--no-data-resources', + '--map-id-template', + f'{options.source_file} ({options.package_name})', + '--source-file-template', + options.source_file, '--output', base_context.staging_dir, '--pg-map-output', @@ -324,21 +281,18 @@ def _OptimizeWithR8(options, ] if options.disable_checks: - # Info level priority logs are not printed by default. - cmd += ['--map-diagnostics:CheckDiscardDiagnostic', 'error', 'info'] - - if options.desugar_jdk_libs_json: - cmd += [ - '--desugared-lib', - options.desugar_jdk_libs_json, - '--desugared-lib-pg-conf-output', - options.desugared_library_keep_rule_output, - ] + cmd += ['--map-diagnostics:CheckDiscardDiagnostic', 'error', 'none'] + cmd += ['--map-diagnostics', 'info', 'warning'] + # An "error" level diagnostic causes r8 to return an error exit code. Doing + # this allows our filter to decide what should/shouldn't break our build. 
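+    # (As used here, --map-diagnostics takes a <from-level> <to-level> pair
+    # and can be scoped to a single diagnostic type via
+    # --map-diagnostics:<Type>.)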
+ cmd += ['--map-diagnostics', 'error', 'warning'] if options.min_api: cmd += ['--min-api', options.min_api] - if options.force_enable_assertions: + if options.assertion_handler: + cmd += ['--force-assertions-handler:' + options.assertion_handler] + elif options.force_enable_assertions: cmd += ['--force-enable-assertions'] for lib in libraries: @@ -351,8 +305,6 @@ def _OptimizeWithR8(options, for main_dex_rule in options.main_dex_rules_path: cmd += ['--main-dex-rules', main_dex_rule] - _DeDupeInputJars(split_contexts_by_name) - # Add any extra inputs to the base context (e.g. desugar runtime). extra_jars = set(options.input_paths) for split_context in split_contexts_by_name.values(): @@ -375,61 +327,26 @@ def _OptimizeWithR8(options, print_stdout=print_stdout, stderr_filter=stderr_filter, fail_on_output=options.warnings_as_errors) - except build_utils.CalledProcessError as err: - debugging_link = ('\n\nR8 failed. Please see {}.'.format( - 'https://chromium.googlesource.com/chromium/src/+/HEAD/build/' - 'android/docs/java_optimization.md#Debugging-common-failures\n')) - raise build_utils.CalledProcessError(err.cwd, err.args, - err.output + debugging_link) - - base_has_imported_lib = False - if options.desugar_jdk_libs_json: - logging.debug('Running L8') - existing_files = build_utils.FindInDirectory(base_context.staging_dir) - jdk_dex_output = os.path.join(base_context.staging_dir, - 'classes%d.dex' % (len(existing_files) + 1)) - # Use -applymapping to avoid name collisions. - l8_dynamic_config_path = os.path.join(tmp_dir, 'l8_dynamic_config.flags') - with open(l8_dynamic_config_path, 'w') as f: - f.write("-applymapping '{}'\n".format(tmp_mapping_path)) - # Pass the dynamic config so that obfuscation options are picked up. - l8_config_paths = [dynamic_config_path, l8_dynamic_config_path] - if os.path.exists(options.desugared_library_keep_rule_output): - l8_config_paths.append(options.desugared_library_keep_rule_output) - - base_has_imported_lib = dex_jdk_libs.DexJdkLibJar( - options.r8_path, options.min_api, options.desugar_jdk_libs_json, - options.desugar_jdk_libs_jar, - options.desugar_jdk_libs_configuration_jar, jdk_dex_output, - options.warnings_as_errors, l8_config_paths) - if int(options.min_api) >= 24 and base_has_imported_lib: - with open(jdk_dex_output, 'rb') as f: - dexfile = dex_parser.DexFile(bytearray(f.read())) - for m in dexfile.IterMethodSignatureParts(): - print('{}#{}'.format(m[0], m[2])) - assert False, ( - 'Desugared JDK libs are disabled on Monochrome and newer - see ' - 'crbug.com/1159984 for details, and see above list for desugared ' - 'classes and methods.') + except build_utils.CalledProcessError as e: + # Do not output command line because it is massive and makes the actual + # error message hard to find. + sys.stderr.write(e.output) + sys.exit(1) logging.debug('Collecting ouputs') - base_context.CreateOutput(base_has_imported_lib, - options.desugared_library_keep_rule_output) + base_context.CreateOutput() for split_context in split_contexts_by_name.values(): if split_context is not base_context: split_context.CreateOutput() - with open(options.mapping_output, 'w') as out_file, \ - open(tmp_mapping_path) as in_file: - # Mapping files generated by R8 include comments that may break - # some of our tooling so remove those (specifically: apkanalyzer). 
- out_file.writelines(l for l in in_file if not l.startswith('#')) - return base_context + shutil.move(tmp_mapping_path, options.mapping_output) + return split_contexts_by_name def _OutputKeepRules(r8_path, input_paths, classpath, targets_re_string, keep_rules_output): - cmd = build_utils.JavaCmd(False) + [ + + cmd = build_utils.JavaCmd() + [ '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences', '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning', '--keep-rules', '--output', keep_rules_output @@ -447,8 +364,13 @@ def _OutputKeepRules(r8_path, input_paths, classpath, targets_re_string, def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors, - error_title): - cmd = build_utils.JavaCmd(warnings_as_errors) + [ + dump_inputs, error_title): + cmd = build_utils.JavaCmd() + + if dump_inputs: + cmd += [f'-Dcom.android.tools.r8.dumpinputtodirectory={_DUMP_DIR_NAME}'] + + cmd += [ '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences', '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning', '--check' @@ -459,6 +381,8 @@ def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors, for path in dex_files: cmd += ['--source', path] + failed_holder = [False] + def stderr_filter(stderr): ignored_lines = [ # Summary contains warning count, which our filtering makes wrong. @@ -472,16 +396,14 @@ def stderr_filter(stderr): # Found in: com/facebook/fbui/textlayoutbuilder/StaticLayoutHelper 'android.text.StaticLayout.', + # TODO(crbug/1426964): Remove once chrome builds with Android U SDK. + 'android.adservices.measurement', # Explicictly guarded by try (NoClassDefFoundError) in Flogger's # PlatformProvider. 'com.google.common.flogger.backend.google.GooglePlatform', 'com.google.common.flogger.backend.system.DefaultPlatform', - # trichrome_webview_google_bundle contains this missing reference. - # TODO(crbug.com/1142530): Fix this missing reference properly. - 'org.chromium.build.NativeLibraries', - # TODO(agrieve): Exclude these only when use_jacoco_coverage=true. 'java.lang.instrument.ClassFileTransformer', 'java.lang.instrument.IllegalClassFormatException', @@ -495,14 +417,19 @@ def stderr_filter(stderr): # Explicitly guarded by try (NoClassDefFoundError) in Firebase's # KotlinDetector: com.google.firebase.platforminfo.KotlinDetector. 'kotlin.KotlinVersion', + + # TODO(agrieve): Remove once we move to Android U SDK. + 'android.window.BackEvent', + 'android.window.OnBackAnimationCallback', ] had_unfiltered_items = ' ' in stderr stderr = build_utils.FilterLines( stderr, '|'.join(re.escape(x) for x in ignored_lines)) if stderr: - if ' ' in stderr: - stderr = error_title + """ + if 'Missing' in stderr: + failed_holder[0] = True + stderr = 'TraceReferences failed: ' + error_title + """ Tip: Build with: is_java_debug=false treat_warnings_as_errors=false @@ -525,40 +452,60 @@ def stderr_filter(stderr): stderr = '' return stderr - logging.debug('cmd: %s', ' '.join(cmd)) - build_utils.CheckOutput(cmd, - print_stdout=True, - stderr_filter=stderr_filter, - fail_on_output=warnings_as_errors) - + try: + build_utils.CheckOutput(cmd, + print_stdout=True, + stderr_filter=stderr_filter, + fail_on_output=warnings_as_errors) + except build_utils.CalledProcessError as e: + # Do not output command line because it is massive and makes the actual + # error message hard to find. 
+      sys.stderr.write(e.output)
+      sys.exit(1)
+  return failed_holder[0]
 
 
-def _CombineConfigs(configs, dynamic_config_data, exclude_generated=False):
-  ret = []
+def _CombineConfigs(configs,
+                    dynamic_config_data,
+                    embedded_configs,
+                    exclude_generated=False):
   # Sort in this way so //clank versions of the same libraries will sort
   # to the same spot in the file.
   def sort_key(path):
     return tuple(reversed(path.split(os.path.sep)))
 
+  def format_config_contents(path, contents):
+    formatted_contents = []
+    if not contents.strip():
+      return []
+
+    # Fix up line endings (third_party configs can have windows endings).
+    contents = contents.replace('\r', '')
+    # Remove numbers from generated rule comments to make file more
+    # diff'able.
+    contents = re.sub(r' #generated:\d+', '', contents)
+    formatted_contents.append('# File: ' + path)
+    formatted_contents.append(contents)
+    formatted_contents.append('')
+    return formatted_contents
+
+  ret = []
   for config in sorted(configs, key=sort_key):
     if exclude_generated and config.endswith('.resources.proguard.txt'):
       continue
 
+    # Exclude some confs from expectations.
+    if any(entry in config for entry in _BLOCKLISTED_EXPECTATION_PATHS):
+      continue
+
     with open(config) as config_file:
       contents = config_file.read().rstrip()
 
-    if not contents.strip():
-      # Ignore empty files.
-      continue
+    ret.extend(format_config_contents(config, contents))
+
+  for path, contents in sorted(embedded_configs.items()):
+    ret.extend(format_config_contents(path, contents))
 
-    # Fix up line endings (third_party configs can have windows endings).
-    contents = contents.replace('\r', '')
-    # Remove numbers from generated rule comments to make file more
-    # diff'able.
-    contents = re.sub(r' #generated:\d+', '', contents)
-    ret.append('# File: ' + config)
-    ret.append(contents)
-    ret.append('')
 
   if dynamic_config_data:
     ret.append('# File: //build/android/gyp/proguard.py (generated rules)')
@@ -568,15 +515,7 @@ def sort_key(path):
 
 
 def _CreateDynamicConfig(options):
-  # Our scripts already fail on output. Adding -ignorewarnings makes R8 output
-  # warnings rather than throw exceptions so we can selectively ignore them via
-  # dex.py's ignore list. Context: https://crbug.com/1180222
-  ret = ["-ignorewarnings"]
-
-  if options.sourcefile:
-    ret.append("-renamesourcefileattribute '%s' # OMIT FROM EXPECTATIONS" %
-               options.sourcefile)
-
+  ret = []
   if options.enable_obfuscation:
     ret.append("-repackageclasses ''")
   else:
@@ -585,39 +524,30 @@
   if options.apply_mapping:
     ret.append("-applymapping '%s'" % options.apply_mapping)
 
-  _min_api = int(options.min_api) if options.min_api else 0
-  for api_level, version_code in _API_LEVEL_VERSION_CODE:
-    annotation_name = 'org.chromium.base.annotations.VerifiesOn' + version_code
-    if api_level > _min_api:
-      ret.append('-keep @interface %s' % annotation_name)
-      ret.append("""\
--if @%s class * {
-  *** *(...);
-}
--keep,allowobfuscation class <1> {
-  *** <2>(...);
-}""" % annotation_name)
-      ret.append("""\
--keepclassmembers,allowobfuscation class ** {
-  @%s <methods>;
-}""" % annotation_name)
   return '\n'.join(ret)
 
 
-def _VerifyNoEmbeddedConfigs(jar_paths):
-  failed = False
-  for jar_path in jar_paths:
-    with zipfile.ZipFile(jar_path) as z:
-      for name in z.namelist():
-        if name.startswith('META-INF/proguard/'):
-          failed = True
-          sys.stderr.write("""\
-Found embedded proguard config within {}.
-Embedded configs are not permitted (https://crbug.com/989505) -""".format(jar_path)) - break - if failed: - sys.exit(1) +def _ExtractEmbeddedConfigs(jar_path, embedded_configs): + with zipfile.ZipFile(jar_path) as z: + proguard_names = [] + r8_names = [] + for info in z.infolist(): + if info.is_dir(): + continue + if info.filename.startswith('META-INF/proguard/'): + proguard_names.append(info.filename) + elif info.filename.startswith('META-INF/com.android.tools/r8/'): + r8_names.append(info.filename) + elif info.filename.startswith('META-INF/com.android.tools/r8-from'): + # Assume our version of R8 is always latest. + if '-upto-' not in info.filename: + r8_names.append(info.filename) + + # Give preference to r8-from-*, then r8/, then proguard/. + active = r8_names or proguard_names + for filename in active: + config_path = '{}:{}'.format(jar_path, filename) + embedded_configs[config_path] = z.read(filename).decode('utf-8').rstrip() def _ContainsDebuggingConfig(config_str): @@ -631,79 +561,130 @@ def _MaybeWriteStampAndDepFile(options, inputs): build_utils.Touch(options.stamp) output = options.stamp if options.depfile: - build_utils.WriteDepfile(options.depfile, output, inputs=inputs) + action_helpers.write_depfile(options.depfile, output, inputs=inputs) -def main(): - build_utils.InitLogging('PROGUARD_DEBUG') - options = _ParseOptions() +def _IterParentContexts(context_name, split_contexts_by_name): + while context_name: + context = split_contexts_by_name[context_name] + yield context + context_name = context.parent_name - logging.debug('Preparing configs') - proguard_configs = options.proguard_configs +def _DoTraceReferencesChecks(options, split_contexts_by_name): + # Set of all contexts that are a parent to another. + parent_splits_context_names = { + c.parent_name + for c in split_contexts_by_name.values() if c.parent_name + } + context_sets = [ + list(_IterParentContexts(n, split_contexts_by_name)) + for n in parent_splits_context_names + ] + # Visit them in order of: base, base+chrome, base+chrome+thing. + context_sets.sort(key=lambda x: (len(x), x[0].name)) + + # Ensure there are no missing references when considering all dex files. + error_title = 'DEX contains references to non-existent symbols after R8.' + dex_files = sorted(c.final_output_path + for c in split_contexts_by_name.values()) + if _CheckForMissingSymbols(options.r8_path, dex_files, options.classpath, + options.warnings_as_errors, options.dump_inputs, + error_title): + # Failed but didn't raise due to warnings_as_errors=False + return + + for context_set in context_sets: + # Ensure there are no references from base -> chrome module, or from + # chrome -> feature modules. + error_title = (f'DEX within module "{context_set[0].name}" contains ' + 'reference(s) to symbols within child splits') + dex_files = [c.final_output_path for c in context_set] + # Each check currently takes about 3 seconds on a fast dev machine, and we + # run 3 of them (all, base, base+chrome). + # We could run them concurrently, to shave off 5-6 seconds, but would need + # to make sure that the order is maintained. + if _CheckForMissingSymbols(options.r8_path, dex_files, options.classpath, + options.warnings_as_errors, options.dump_inputs, + error_title): + # Failed but didn't raise due to warnings_as_errors=False + return + + +def _Run(options): # ProGuard configs that are derived from flags. 
+ logging.debug('Preparing configs') dynamic_config_data = _CreateDynamicConfig(options) + logging.debug('Looking for embedded configs') + # If a jar is part of input no need to include it as library jar. + libraries = [p for p in options.classpath if p not in options.input_paths] + + embedded_configs = {} + for jar_path in options.input_paths + libraries: + _ExtractEmbeddedConfigs(jar_path, embedded_configs) + # ProGuard configs that are derived from flags. - merged_configs = _CombineConfigs( - proguard_configs, dynamic_config_data, exclude_generated=True) + merged_configs = _CombineConfigs(options.proguard_configs, + dynamic_config_data, + embedded_configs, + exclude_generated=True) print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose + depfile_inputs = options.proguard_configs + options.input_paths + libraries if options.expected_file: diff_utils.CheckExpectations(merged_configs, options) if options.only_verify_expectations: - build_utils.WriteDepfile(options.depfile, - options.actual_file, - inputs=options.proguard_configs) + action_helpers.write_depfile(options.depfile, + options.actual_file, + inputs=depfile_inputs) return - logging.debug('Looking for embedded configs') - libraries = [] - for p in options.classpath: - # TODO(bjoyce): Remove filter once old android support libraries are gone. - # Fix for having Library class extend program class dependency problem. - if 'com_android_support' in p or 'android_support_test' in p: - continue - # If a jar is part of input no need to include it as library jar. - if p not in libraries and p not in options.input_paths: - libraries.append(p) - _VerifyNoEmbeddedConfigs(options.input_paths + libraries) if options.keep_rules_output_path: _OutputKeepRules(options.r8_path, options.input_paths, options.classpath, options.keep_rules_targets_regex, options.keep_rules_output_path) return - base_context = _OptimizeWithR8(options, proguard_configs, libraries, - dynamic_config_data, print_stdout) + split_contexts_by_name = _OptimizeWithR8(options, options.proguard_configs, + libraries, dynamic_config_data, + print_stdout) if not options.disable_checks: logging.debug('Running tracereferences') - all_dex_files = [] - if options.output_path: - all_dex_files.append(options.output_path) - if options.dex_dests: - all_dex_files.extend(options.dex_dests) - error_title = 'DEX contains references to non-existent symbols after R8.' - _CheckForMissingSymbols(options.r8_path, all_dex_files, options.classpath, - options.warnings_as_errors, error_title) - # Also ensure that base module doesn't have any references to child dex - # symbols. - # TODO(agrieve): Remove this check once r8 desugaring is fixed to not put - # synthesized classes in the base module. - error_title = 'Base module DEX contains references symbols within DFMs.' 
-    _CheckForMissingSymbols(options.r8_path, [base_context.final_output_path],
-                            options.classpath, options.warnings_as_errors,
-                            error_title)
+    _DoTraceReferencesChecks(options, split_contexts_by_name)
 
   for output in options.extra_mapping_output_paths:
     shutil.copy(options.mapping_output, output)
 
-  inputs = options.proguard_configs + options.input_paths + libraries
   if options.apply_mapping:
-    inputs.append(options.apply_mapping)
+    depfile_inputs.append(options.apply_mapping)
+
+  _MaybeWriteStampAndDepFile(options, depfile_inputs)
+
+
+def main():
+  build_utils.InitLogging('PROGUARD_DEBUG')
+  options = _ParseOptions()
 
-  _MaybeWriteStampAndDepFile(options, inputs)
+  if options.dump_inputs:
+    # Dumping inputs causes output to be emitted, avoid failing due to stdout.
+    options.warnings_as_errors = False
+    # Use dumpinputtodirectory instead of dumpinputtofile to avoid failing the
+    # build and keep running tracereferences.
+    dump_dir_name = _DUMP_DIR_NAME
+    dump_dir_path = pathlib.Path(dump_dir_name)
+    if dump_dir_path.exists():
+      shutil.rmtree(dump_dir_path)
+    # The directory needs to exist before r8 adds the zip files in it.
+    dump_dir_path.mkdir()
+
+  # This ensures that the final outputs are zipped and easily uploaded to a bug.
+  try:
+    _Run(options)
+  finally:
+    if options.dump_inputs:
+      zip_helpers.zip_directory('r8inputs.zip', _DUMP_DIR_NAME)
 
 
 if __name__ == '__main__':
diff --git a/build/android/gyp/proguard.pydeps b/build/android/gyp/proguard.pydeps
index c1de73b57e80..7ee251b8ebf0 100644
--- a/build/android/gyp/proguard.pydeps
+++ b/build/android/gyp/proguard.pydeps
@@ -1,16 +1,12 @@
 # Generated by running:
 #   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
+../../action_helpers.py
 ../../gn_helpers.py
 ../../print_python_deps.py
-../convert_dex_profile.py
-../pylib/__init__.py
-../pylib/dex/__init__.py
-../pylib/dex/dex_parser.py
+../../zip_helpers.py
 dex.py
-dex_jdk_libs.py
 proguard.py
 util/__init__.py
 util/build_utils.py
 util/diff_utils.py
 util/md5_check.py
-util/zipalign.py
diff --git a/build/android/gyp/resources_shrinker/BUILD.gn b/build/android/gyp/resources_shrinker/BUILD.gn
deleted file mode 100644
index e6381e1bb301..000000000000
--- a/build/android/gyp/resources_shrinker/BUILD.gn
+++ /dev/null
@@ -1,15 +0,0 @@
-import("//build/config/android/rules.gni")
-
-java_binary("resources_shrinker") {
-  sources = [ "//build/android/gyp/resources_shrinker/Shrinker.java" ]
-  main_class = "build.android.gyp.resources_shrinker.Shrinker"
-  deps = [
-    "//third_party/android_deps:com_android_tools_common_java",
-    "//third_party/android_deps:com_android_tools_layoutlib_layoutlib_api_java",
-    "//third_party/android_deps:com_android_tools_sdk_common_java",
-    "//third_party/android_deps:com_google_guava_guava_java",
-    "//third_party/android_deps:org_jetbrains_kotlin_kotlin_stdlib_java",
-    "//third_party/r8:r8_java",
-  ]
-  wrapper_script_name = "helper/resources_shrinker"
-}
diff --git a/build/android/gyp/resources_shrinker/shrinker.py b/build/android/gyp/resources_shrinker/shrinker.py
deleted file mode 100755
index 2800ce290813..000000000000
--- a/build/android/gyp/resources_shrinker/shrinker.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/env python3
-# encoding: utf-8
-# Copyright (c) 2021 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
- -import argparse -import os -import sys - -sys.path.insert( - 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))) -from util import build_utils -from util import resource_utils - - -def main(args): - parser = argparse.ArgumentParser() - - build_utils.AddDepfileOption(parser) - parser.add_argument('--script', - required=True, - help='Path to the unused resources detector script.') - parser.add_argument( - '--dependencies-res-zips', - required=True, - help='Resources zip archives to investigate for unused resources.') - parser.add_argument('--dex', - required=True, - help='Path to dex file, or zip with dex files.') - parser.add_argument( - '--proguard-mapping', - required=True, - help='Path to proguard mapping file for the optimized dex.') - parser.add_argument('--r-text', required=True, help='Path to R.txt') - parser.add_argument('--android-manifest', - required=True, - help='Path to AndroidManifest') - parser.add_argument('--output-config', - required=True, - help='Path to output the aapt2 config to.') - args = build_utils.ExpandFileArgs(args) - options = parser.parse_args(args) - options.dependencies_res_zips = (build_utils.ParseGnList( - options.dependencies_res_zips)) - - # in case of no resources, short circuit early. - if not options.dependencies_res_zips: - build_utils.Touch(options.output_config) - return - - with build_utils.TempDir() as temp_dir: - dep_subdirs = [] - for dependency_res_zip in options.dependencies_res_zips: - dep_subdirs += resource_utils.ExtractDeps([dependency_res_zip], temp_dir) - - build_utils.CheckOutput([ - options.script, '--rtxts', options.r_text, '--manifests', - options.android_manifest, '--resourceDirs', ':'.join(dep_subdirs), - '--dex', options.dex, '--mapping', options.proguard_mapping, - '--outputConfig', options.output_config - ]) - - if options.depfile: - depfile_deps = options.dependencies_res_zips + [ - options.r_text, - options.android_manifest, - options.dex, - options.proguard_mapping, - ] - build_utils.WriteDepfile(options.depfile, options.output_config, - depfile_deps) - - -if __name__ == '__main__': - main(sys.argv[1:]) diff --git a/build/android/gyp/resources_shrinker/shrinker.pydeps b/build/android/gyp/resources_shrinker/shrinker.pydeps deleted file mode 100644 index 92c8905ec4e2..000000000000 --- a/build/android/gyp/resources_shrinker/shrinker.pydeps +++ /dev/null @@ -1,30 +0,0 @@ -# Generated by running: -# build/print_python_deps.py --root build/android/gyp/resources_shrinker --output build/android/gyp/resources_shrinker/shrinker.pydeps build/android/gyp/resources_shrinker/shrinker.py -../../../../third_party/jinja2/__init__.py -../../../../third_party/jinja2/_compat.py -../../../../third_party/jinja2/asyncfilters.py -../../../../third_party/jinja2/asyncsupport.py -../../../../third_party/jinja2/bccache.py -../../../../third_party/jinja2/compiler.py -../../../../third_party/jinja2/defaults.py -../../../../third_party/jinja2/environment.py -../../../../third_party/jinja2/exceptions.py -../../../../third_party/jinja2/filters.py -../../../../third_party/jinja2/idtracking.py -../../../../third_party/jinja2/lexer.py -../../../../third_party/jinja2/loaders.py -../../../../third_party/jinja2/nodes.py -../../../../third_party/jinja2/optimizer.py -../../../../third_party/jinja2/parser.py -../../../../third_party/jinja2/runtime.py -../../../../third_party/jinja2/tests.py -../../../../third_party/jinja2/utils.py -../../../../third_party/jinja2/visitor.py -../../../../third_party/markupsafe/__init__.py 
-../../../../third_party/markupsafe/_compat.py -../../../../third_party/markupsafe/_native.py -../../../gn_helpers.py -../util/__init__.py -../util/build_utils.py -../util/resource_utils.py -shrinker.py diff --git a/build/android/gyp/system_image_apks.py b/build/android/gyp/system_image_apks.py new file mode 100755 index 000000000000..0b6804b9af11 --- /dev/null +++ b/build/android/gyp/system_image_apks.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 + +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Generates APKs for use on system images.""" + +import argparse +import os +import pathlib +import tempfile +import shutil +import sys +import zipfile + +_DIR_SOURCE_ROOT = str(pathlib.Path(__file__).parents[2]) +sys.path.append(os.path.join(_DIR_SOURCE_ROOT, 'build', 'android', 'gyp')) +from util import build_utils + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--input', required=True, help='Input path') + parser.add_argument('--output', required=True, help='Output path') + parser.add_argument('--bundle-wrapper', help='APK operations script path') + parser.add_argument('--fuse-apk', + help='Create single .apk rather than using apk splits', + action='store_true') + args = parser.parse_args() + + if not args.bundle_wrapper: + shutil.copyfile(args.input, args.output) + return + + with tempfile.NamedTemporaryFile(suffix='.apks') as tmp_file: + cmd = [ + args.bundle_wrapper, 'build-bundle-apks', '--output-apks', tmp_file.name + ] + cmd += ['--build-mode', 'system' if args.fuse_apk else 'system_apks'] + + # Creates a .apks zip file that contains the system image APK(s). + build_utils.CheckOutput(cmd) + + if args.fuse_apk: + with zipfile.ZipFile(tmp_file.name) as z: + pathlib.Path(args.output).write_bytes(z.read('system/system.apk')) + return + + # Rename .apk files and remove toc.pb to make it clear that system apks + # should not be installed via bundletool. + with zipfile.ZipFile(tmp_file.name) as z_input, \ + zipfile.ZipFile(args.output, 'w') as z_output: + for info in z_input.infolist(): + if info.filename.endswith('.apk'): + data = z_input.read(info) + info.filename = (info.filename.replace('splits/', + '').replace('-master', '')) + z_output.writestr(info, data) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/desugar.pydeps b/build/android/gyp/system_image_apks.pydeps similarity index 58% rename from build/android/gyp/desugar.pydeps rename to build/android/gyp/system_image_apks.pydeps index 3e5c9ea23126..35f1dc9fe6e4 100644 --- a/build/android/gyp/desugar.pydeps +++ b/build/android/gyp/system_image_apks.pydeps @@ -1,6 +1,6 @@ # Generated by running: -# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/desugar.pydeps build/android/gyp/desugar.py +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/system_image_apks.pydeps build/android/gyp/system_image_apks.py ../../gn_helpers.py -desugar.py +system_image_apks.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java index 10860d8332d3..2c4d9a274712 100644 --- a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java +++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java @@ -1,4 +1,4 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. 
+// Copyright 2014 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java index b09673e21f4f..2762b4f9e036 100644 --- a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java +++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java @@ -1,4 +1,4 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. +// Copyright 2014 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/gyp/trace_event_bytecode_rewriter.py b/build/android/gyp/trace_event_bytecode_rewriter.py new file mode 100755 index 000000000000..3e0e696f511b --- /dev/null +++ b/build/android/gyp/trace_event_bytecode_rewriter.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Wrapper script around TraceEventAdder script.""" + +import argparse +import sys +import os + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + + +def main(argv): + argv = build_utils.ExpandFileArgs(argv[1:]) + parser = argparse.ArgumentParser() + action_helpers.add_depfile_arg(parser) + parser.add_argument('--script', + required=True, + help='Path to the java binary wrapper script.') + parser.add_argument('--stamp', help='Path to stamp to mark when finished.') + parser.add_argument('--classpath', action='append', nargs='+') + parser.add_argument('--input-jars', action='append', nargs='+') + parser.add_argument('--output-jars', action='append', nargs='+') + args = parser.parse_args(argv) + + args.classpath = action_helpers.parse_gn_list(args.classpath) + args.input_jars = action_helpers.parse_gn_list(args.input_jars) + args.output_jars = action_helpers.parse_gn_list(args.output_jars) + + for output_jar in args.output_jars: + jar_dir = os.path.dirname(output_jar) + if not os.path.exists(jar_dir): + os.makedirs(jar_dir) + + all_input_jars = set(args.classpath + args.input_jars) + cmd = [ + args.script, '--classpath', ':'.join(sorted(all_input_jars)), + ':'.join(args.input_jars), ':'.join(args.output_jars) + ] + build_utils.CheckOutput(cmd, print_stdout=True) + + build_utils.Touch(args.stamp) + + action_helpers.write_depfile(args.depfile, args.stamp, inputs=all_input_jars) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/build/android/gyp/trace_event_bytecode_rewriter.pydeps b/build/android/gyp/trace_event_bytecode_rewriter.pydeps new file mode 100644 index 000000000000..e03fc0c233a8 --- /dev/null +++ b/build/android/gyp/trace_event_bytecode_rewriter.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/trace_event_bytecode_rewriter.pydeps build/android/gyp/trace_event_bytecode_rewriter.py +../../action_helpers.py +../../gn_helpers.py +trace_event_bytecode_rewriter.py +util/__init__.py +util/build_utils.py diff --git a/build/android/gyp/turbine.py b/build/android/gyp/turbine.py index 208cc760637a..2de92f4704ae 100755 --- a/build/android/gyp/turbine.py +++ b/build/android/gyp/turbine.py @@ -1,24 +1,35 @@ #!/usr/bin/env python3 -# Copyright 2020 The Chromium Authors. All rights reserved. 
+# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Wraps the turbine jar and expands @FileArgs.""" import argparse +import functools import logging -import os -import shutil import sys import time +import zipfile +import compile_java +import javac_output_processor from util import build_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + + +def ProcessJavacOutput(output, target_name): + output_processor = javac_output_processor.JavacOutputProcessor(target_name) + lines = output_processor.Process(output.split('\n')) + return '\n'.join(lines) def main(argv): build_utils.InitLogging('TURBINE_DEBUG') argv = build_utils.ExpandFileArgs(argv[1:]) parser = argparse.ArgumentParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) + parser.add_argument('--target-name', help='Fully qualified GN target name.') parser.add_argument( '--turbine-jar-path', required=True, help='Path to the turbine jar file.') parser.add_argument( @@ -26,15 +37,6 @@ def main(argv): action='append', default=[], help='List of srcjars to include in compilation.') - parser.add_argument( - '--bootclasspath', - action='append', - default=[], - help='Boot classpath for javac. If this is specified multiple times, ' - 'they will all be appended to construct the classpath.') - parser.add_argument( - '--java-version', - help='Java language version to use in -source and -target args to javac.') parser.add_argument('--classpath', action='append', help='Classpath to use.') parser.add_argument( '--processors', @@ -57,13 +59,14 @@ def main(argv): parser.add_argument('--warnings-as-errors', action='store_true', help='Treat all warnings as errors.') + parser.add_argument('--kotlin-jar-path', + help='Kotlin jar to be merged into the output jar.') options, unknown_args = parser.parse_known_args(argv) - options.bootclasspath = build_utils.ParseGnList(options.bootclasspath) - options.classpath = build_utils.ParseGnList(options.classpath) - options.processorpath = build_utils.ParseGnList(options.processorpath) - options.processors = build_utils.ParseGnList(options.processors) - options.java_srcjars = build_utils.ParseGnList(options.java_srcjars) + options.classpath = action_helpers.parse_gn_list(options.classpath) + options.processorpath = action_helpers.parse_gn_list(options.processorpath) + options.processors = action_helpers.parse_gn_list(options.processors) + options.java_srcjars = action_helpers.parse_gn_list(options.java_srcjars) files = [] for arg in unknown_args: @@ -71,10 +74,19 @@ def main(argv): if arg.startswith('@'): files.extend(build_utils.ReadSourcesList(arg[1:])) - cmd = build_utils.JavaCmd(options.warnings_as_errors) + [ + # The target's .sources file contains both Java and Kotlin files. We use + # compile_kt.py to compile the Kotlin files to .class and header jars. + # Turbine is run only on .java files. + java_files = [f for f in files if f.endswith('.java')] + + cmd = build_utils.JavaCmd() + [ '-classpath', options.turbine_jar_path, 'com.google.turbine.main.Main' ] - javac_cmd = [] + javac_cmd = [ + # We currently target JDK 11 everywhere. + '--release', + '11', + ] # Turbine reads lists from command line args by consuming args until one # starts with double dash (--). 
Thus command line args should be grouped
@@ -83,22 +95,6 @@ def main(argv):
     cmd += ['--processors']
     cmd += options.processors
 
-  if options.java_version:
-    javac_cmd.extend([
-        '-source',
-        options.java_version,
-        '-target',
-        options.java_version,
-    ])
-    if options.java_version == '1.8':
-      # Android's boot jar doesn't contain all java 8 classes.
-      options.bootclasspath.append(build_utils.RT_JAR_PATH)
-
-  if options.bootclasspath:
-    cmd += ['--bootclasspath']
-    for bootclasspath in options.bootclasspath:
-      cmd += bootclasspath.split(':')
-
   if options.processorpath:
     cmd += ['--processorpath']
     cmd += options.processorpath
@@ -115,40 +111,57 @@ def main(argv):
     cmd += ['--source_jars']
     cmd += options.java_srcjars
 
-  if files:
+  if java_files:
     # Use jar_path to ensure paths are relative (needed for goma).
-    files_rsp_path = options.jar_path + '.files_list.txt'
+    files_rsp_path = options.jar_path + '.java_files_list.txt'
     with open(files_rsp_path, 'w') as f:
-      f.write(' '.join(files))
-    # Pass source paths as response files to avoid extremely long command lines
-    # that are tedius to debug.
+      f.write(' '.join(java_files))
+    # Pass source paths as response files to avoid extremely long command
+    # lines that are tedious to debug.
     cmd += ['--sources']
     cmd += ['@' + files_rsp_path]
 
-  if javac_cmd:
-    cmd.append('--javacopts')
-    cmd += javac_cmd
-    cmd.append('--')  # Terminate javacopts
+  cmd += ['--javacopts']
+  cmd += javac_cmd
+  cmd += ['--']  # Terminate javacopts
 
   # Use AtomicOutput so that output timestamps are not updated when outputs
   # are not changed.
-  with build_utils.AtomicOutput(options.jar_path) as output_jar, \
-      build_utils.AtomicOutput(options.generated_jar_path) as generated_jar:
-    cmd += ['--output', output_jar.name, '--gensrc_output', generated_jar.name]
+  with action_helpers.atomic_output(options.jar_path) as output_jar, \
+      action_helpers.atomic_output(options.generated_jar_path) as gensrc_jar:
+    cmd += ['--output', output_jar.name, '--gensrc_output', gensrc_jar.name]
+    process_javac_output_partial = functools.partial(
+        ProcessJavacOutput, target_name=options.target_name)
+    logging.debug('Command: %s', cmd)
     start = time.time()
-    build_utils.CheckOutput(cmd,
-                            print_stdout=True,
-                            fail_on_output=options.warnings_as_errors)
+    try:
+      build_utils.CheckOutput(cmd,
+                              print_stdout=True,
+                              stdout_filter=process_javac_output_partial,
+                              stderr_filter=process_javac_output_partial,
+                              fail_on_output=options.warnings_as_errors)
+    except build_utils.CalledProcessError as e:
+      # Do not output stacktrace as it takes up space on gerrit UI, forcing
+      # you to click through to find the actual compilation error. It's never
+      # interesting to see the Python stacktrace for a Java compilation error.
+      sys.stderr.write(e.output)
+      sys.exit(1)
     end = time.time() - start
     logging.info('Header compilation took %ss', end)
+    if options.kotlin_jar_path:
+      with zipfile.ZipFile(output_jar.name, 'a') as out_zip:
+        path_transform = lambda p: p if p.endswith('.class') else None
+        zip_helpers.merge_zips(out_zip, [options.kotlin_jar_path],
+                               path_transform=path_transform)
 
   if options.depfile:
     # GN already knows of the java files, so avoid listing individual java files
     # in the depfile.
- depfile_deps = (options.bootclasspath + options.classpath + - options.processorpath + options.java_srcjars) - build_utils.WriteDepfile(options.depfile, options.jar_path, depfile_deps) + depfile_deps = (options.classpath + options.processorpath + + options.java_srcjars) + action_helpers.write_depfile(options.depfile, options.jar_path, + depfile_deps) if __name__ == '__main__': diff --git a/build/android/gyp/turbine.pydeps b/build/android/gyp/turbine.pydeps index f0b2411e581a..3d20f2ef4cec 100644 --- a/build/android/gyp/turbine.pydeps +++ b/build/android/gyp/turbine.pydeps @@ -1,6 +1,33 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/turbine.pydeps build/android/gyp/turbine.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../third_party/colorama/src/colorama/__init__.py +../../../third_party/colorama/src/colorama/ansi.py +../../../third_party/colorama/src/colorama/ansitowin32.py +../../../third_party/colorama/src/colorama/initialise.py +../../../third_party/colorama/src/colorama/win32.py +../../../third_party/colorama/src/colorama/winterm.py +../../../tools/android/modularization/convenience/lookup_dep.py +../../action_helpers.py ../../gn_helpers.py +../../print_python_deps.py +../../zip_helpers.py +../list_java_targets.py +../pylib/__init__.py +../pylib/constants/__init__.py +compile_java.py +javac_output_processor.py turbine.py util/__init__.py util/build_utils.py +util/jar_info_utils.py +util/md5_check.py +util/server_utils.py diff --git a/build/android/gyp/unused_resources.py b/build/android/gyp/unused_resources.py new file mode 100755 index 000000000000..d7578ce709f2 --- /dev/null +++ b/build/android/gyp/unused_resources.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python3 +# encoding: utf-8 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import sys + +from util import build_utils +from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. 
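For context on the '@' + files_rsp_path convention used by turbine.py above:
tools that accept @-prefixed response files let build scripts keep argv short.
A minimal sketch of the pattern; the tool name and the .rsp suffix here are
illustrative, not taken from these scripts:

import subprocess
import tempfile

def run_with_response_file(tool_cmd, source_paths):
  # Write the (potentially huge) path list to a side file; the tool expands
  # '@<path>' itself, which keeps the command line short enough to read and
  # to copy/paste when debugging.
  with tempfile.NamedTemporaryFile('w', suffix='.rsp', delete=False) as rsp:
    rsp.write(' '.join(source_paths))
  subprocess.run(tool_cmd + ['@' + rsp.name], check=True)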
+ + +def _FilterUnusedResources(r_text_in, r_text_out, unused_resources_config): + removed_resources = set() + with open(unused_resources_config, encoding='utf-8') as output_config: + for line in output_config: + # example line: attr/line_height#remove + resource = line.split('#')[0] + resource_type, resource_name = resource.split('/') + removed_resources.add((resource_type, resource_name)) + kept_lines = [] + with open(r_text_in, encoding='utf-8') as infile: + for line in infile: + # example line: int attr line_height 0x7f0014ee + resource_type, resource_name = line.split(' ')[1:3] + if (resource_type, resource_name) not in removed_resources: + kept_lines.append(line) + + with open(r_text_out, 'w', encoding='utf-8') as out_file: + out_file.writelines(kept_lines) + + +def main(args): + parser = argparse.ArgumentParser() + + action_helpers.add_depfile_arg(parser) + parser.add_argument('--script', + required=True, + help='Path to the unused resources detector script.') + parser.add_argument( + '--dependencies-res-zips', + required=True, + action='append', + help='Resources zip archives to investigate for unused resources.') + parser.add_argument('--dexes', + action='append', + required=True, + help='Path to dex file, or zip with dex files.') + parser.add_argument( + '--proguard-mapping', + help='Path to proguard mapping file for the optimized dex.') + parser.add_argument('--r-text-in', required=True, help='Path to input R.txt') + parser.add_argument( + '--r-text-out', + help='Path to output R.txt with unused resources removed.') + parser.add_argument('--android-manifests', + action='append', + required=True, + help='Path to AndroidManifest') + parser.add_argument('--output-config', + required=True, + help='Path to output the aapt2 config to.') + args = build_utils.ExpandFileArgs(args) + options = parser.parse_args(args) + options.dependencies_res_zips = (action_helpers.parse_gn_list( + options.dependencies_res_zips)) + + # in case of no resources, short circuit early. 
+ if not options.dependencies_res_zips: + build_utils.Touch(options.output_config) + return + + with build_utils.TempDir() as temp_dir: + dep_subdirs = [] + for dependency_res_zip in options.dependencies_res_zips: + dep_subdirs += resource_utils.ExtractDeps([dependency_res_zip], temp_dir) + + cmd = [ + options.script, + '--rtxts', + options.r_text_in, + '--manifests', + ':'.join(options.android_manifests), + '--resourceDirs', + ':'.join(dep_subdirs), + '--dexes', + ':'.join(options.dexes), + '--outputConfig', + options.output_config, + ] + if options.proguard_mapping: + cmd += [ + '--mapping', + options.proguard_mapping, + ] + build_utils.CheckOutput(cmd) + + if options.r_text_out: + _FilterUnusedResources(options.r_text_in, options.r_text_out, + options.output_config) + + if options.depfile: + depfile_deps = (options.dependencies_res_zips + options.android_manifests + + options.dexes) + [options.r_text_in] + if options.proguard_mapping: + depfile_deps.append(options.proguard_mapping) + action_helpers.write_depfile(options.depfile, options.output_config, + depfile_deps) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/build/android/gyp/unused_resources.pydeps b/build/android/gyp/unused_resources.pydeps new file mode 100644 index 000000000000..b4da89a95eb0 --- /dev/null +++ b/build/android/gyp/unused_resources.pydeps @@ -0,0 +1,30 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/unused_resources.pydeps build/android/gyp/unused_resources.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../action_helpers.py +../../gn_helpers.py +unused_resources.py +util/__init__.py +util/build_utils.py +util/resource_utils.py diff --git a/build/android/gyp/util/__init__.py b/build/android/gyp/util/__init__.py index 96196cffb272..5ffa28413724 100644 --- a/build/android/gyp/util/__init__.py +++ b/build/android/gyp/util/__init__.py @@ -1,3 +1,3 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/gyp/util/build_utils.py b/build/android/gyp/util/build_utils.py index d1d3a726731d..f88518210b5b 100644 --- a/build/android/gyp/util/build_utils.py +++ b/build/android/gyp/util/build_utils.py @@ -1,4 +1,4 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
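The --dependencies-res-zips flag above (like most list-valued flags in these
scripts) goes through action_helpers.parse_gn_list, whose contract matches the
ParseGnList docstring removed from build_utils.py below. A rough functional
sketch; ast.literal_eval stands in for the real gn_helpers.GNValueParser:

import ast

def parse_gn_list_sketch(value):
  if not value:  # Both None and '' become [].
    return []
  if isinstance(value, list):  # A list of GN lists is flattened.
    return [x for v in value for x in parse_gn_list_sketch(v)]
  if value.startswith('['):  # '["a", "b"]' -> ['a', 'b']
    return list(ast.literal_eval(value))
  return [value]  # 'asdf' -> ['asdf']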
@@ -14,11 +14,13 @@ import os import pipes import re +import shlex import shutil import stat import subprocess import sys import tempfile +import textwrap import time import zipfile @@ -36,26 +38,19 @@ JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current') JAVAC_PATH = os.path.join(JAVA_HOME, 'bin', 'javac') JAVAP_PATH = os.path.join(JAVA_HOME, 'bin', 'javap') -RT_JAR_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'extras', - 'java_8', 'jre', 'lib', 'rt.jar') - -try: - string_types = basestring -except NameError: - string_types = (str, bytes) - - -def JavaCmd(verify=True, xmx='1G'): +KOTLIN_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'kotlinc', 'current') +KOTLINC_PATH = os.path.join(KOTLIN_HOME, 'bin', 'kotlinc') +# Please avoid using this. Our JAVA_HOME is using a newer and actively patched +# JDK. +JAVA_11_HOME_DEPRECATED = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk11', + 'current') + +def JavaCmd(xmx='1G'): ret = [os.path.join(JAVA_HOME, 'bin', 'java')] # Limit heap to avoid Java not GC'ing when it should, and causing # bots to OOM when many java commands are runnig at the same time # https://crbug.com/1098333 ret += ['-Xmx' + xmx] - - # Disable bytecode verification for local builds gives a ~2% speed-up. - if not verify: - ret += ['-noverify'] - return ret @@ -97,35 +92,6 @@ def FindInDirectory(directory, filename_filter='*'): return files -def ParseGnList(value): - """Converts a "GN-list" command-line parameter into a list. - - Conversions handled: - * None -> [] - * '' -> [] - * 'asdf' -> ['asdf'] - * '["a", "b"]' -> ['a', 'b'] - * ['["a", "b"]', 'c'] -> ['a', 'b', 'c'] (flattened list) - - The common use for this behavior is in the Android build where things can - take lists of @FileArg references that are expanded via ExpandFileArgs. - """ - # Convert None to []. - if not value: - return [] - # Convert a list of GN lists to a flattened list. - if isinstance(value, list): - ret = [] - for arg in value: - ret.extend(ParseGnList(arg)) - return ret - # Convert normal GN list. - if value.startswith('['): - return gn_helpers.GNValueParser(value).ParseList() - # Convert a single string value to a list. - return [value] - - def CheckOptions(options, parser, required=None): if not required: return @@ -148,24 +114,7 @@ def WriteJson(obj, path, only_if_changed=False): @contextlib.contextmanager -def AtomicOutput(path, only_if_changed=True, mode='w+b'): - """Helper to prevent half-written outputs. - - Args: - path: Path to the final output file, which will be written atomically. - only_if_changed: If True (the default), do not touch the filesystem - if the content has not changed. - mode: The mode to open the file in (str). - Returns: - A python context manager that yelds a NamedTemporaryFile instance - that must be used by clients to write the data to. On exit, the - manager will try to replace the final output file with the - temporary one if necessary. The temporary file is always destroyed - on exit. - Example: - with build_utils.AtomicOutput(output_path) as tmp_file: - subprocess.check_call(['prog', '--output', tmp_file.name]) - """ +def _AtomicOutput(path, only_if_changed=True, mode='w+b'): # Create in same directory to ensure same filesystem when moving. 
dirname = os.path.dirname(path) if not os.path.exists(dirname): @@ -190,16 +139,21 @@ class CalledProcessError(Exception): exits with a non-zero exit code.""" def __init__(self, cwd, args, output): - super(CalledProcessError, self).__init__() + super().__init__() self.cwd = cwd self.args = args self.output = output def __str__(self): # A user should be able to simply copy and paste the command that failed - # into their shell. + # into their shell (unless it is more than 200 chars). + # User can set PRINT_FULL_COMMAND=1 to always print the full command. + print_full = os.environ.get('PRINT_FULL_COMMAND', '0') != '0' + full_cmd = shlex.join(self.args) + short_cmd = textwrap.shorten(full_cmd, width=200) + printed_cmd = full_cmd if print_full else short_cmd copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd), - ' '.join(map(pipes.quote, self.args))) + printed_cmd) return 'Command failed: {}\n{}'.format(copyable_command, self.output) @@ -254,6 +208,7 @@ def CheckOutput(args, if not cwd: cwd = os.getcwd() + logging.info('CheckOutput: %s', ' '.join(args)) child = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env) stdout, stderr = child.communicate() @@ -279,18 +234,25 @@ def CheckOutput(args, has_stdout = print_stdout and stdout has_stderr = print_stderr and stderr - if fail_on_output and (has_stdout or has_stderr): - MSG = """\ -Command failed because it wrote to {}. -You can often set treat_warnings_as_errors=false to not treat output as \ -failure (useful when developing locally).""" + if has_stdout or has_stderr: if has_stdout and has_stderr: - stream_string = 'stdout and stderr' + stream_name = 'stdout and stderr' elif has_stdout: - stream_string = 'stdout' + stream_name = 'stdout' else: - stream_string = 'stderr' - raise CalledProcessError(cwd, args, MSG.format(stream_string)) + stream_name = 'stderr' + + if fail_on_output: + MSG = """ +Command failed because it wrote to {}. +You can often set treat_warnings_as_errors=false to not treat output as \ +failure (useful when developing locally). +""" + raise CalledProcessError(cwd, args, MSG.format(stream_name)) + + short_cmd = textwrap.shorten(shlex.join(args), width=200) + sys.stderr.write( + f'\nThe above {stream_name} output was from: {short_cmd}\n') return stdout @@ -367,183 +329,6 @@ def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None, return extracted -def HermeticDateTime(timestamp=None): - """Returns a constant ZipInfo.date_time tuple. - - Args: - timestamp: Unix timestamp to use for files in the archive. - - Returns: - A ZipInfo.date_time tuple for Jan 1, 2001, or the given timestamp. - """ - if not timestamp: - return (2001, 1, 1, 0, 0, 0) - utc_time = time.gmtime(timestamp) - return (utc_time.tm_year, utc_time.tm_mon, utc_time.tm_mday, utc_time.tm_hour, - utc_time.tm_min, utc_time.tm_sec) - - -def HermeticZipInfo(*args, **kwargs): - """Creates a zipfile.ZipInfo with a constant timestamp and external_attr. - - If a date_time value is not provided in the positional or keyword arguments, - the default value from HermeticDateTime is used. - - Args: - See zipfile.ZipInfo. - - Returns: - A zipfile.ZipInfo. - """ - # The caller may have provided a date_time either as a positional parameter - # (args[1]) or as a keyword parameter. Use the default hermetic date_time if - # none was provided. 
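The hermetic-zip helpers being deleted from build_utils.py here (presumably in
favor of the zip_helpers module imported elsewhere in this patch) reduce to
pinning per-entry metadata. A minimal sketch of the same idea:

import zipfile

def write_hermetic_entry(zip_file, arcname, data):
  # Pin the timestamp (Jan 1, 2001) and the permission bits (0644) so the
  # archive's bytes depend only on its contents, not on when the build ran.
  info = zipfile.ZipInfo(arcname, date_time=(2001, 1, 1, 0, 0, 0))
  info.external_attr = 0o644 << 16
  zip_file.writestr(info, data, zipfile.ZIP_STORED)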
- date_time = None - if len(args) >= 2: - date_time = args[1] - elif 'date_time' in kwargs: - date_time = kwargs['date_time'] - if not date_time: - kwargs['date_time'] = HermeticDateTime() - ret = zipfile.ZipInfo(*args, **kwargs) - ret.external_attr = (0o644 << 16) - return ret - - -def AddToZipHermetic(zip_file, - zip_path, - src_path=None, - data=None, - compress=None, - date_time=None): - """Adds a file to the given ZipFile with a hard-coded modified time. - - Args: - zip_file: ZipFile instance to add the file to. - zip_path: Destination path within the zip file (or ZipInfo instance). - src_path: Path of the source file. Mutually exclusive with |data|. - data: File data as a string. - compress: Whether to enable compression. Default is taken from ZipFile - constructor. - date_time: The last modification date and time for the archive member. - """ - assert (src_path is None) != (data is None), ( - '|src_path| and |data| are mutually exclusive.') - if isinstance(zip_path, zipfile.ZipInfo): - zipinfo = zip_path - zip_path = zipinfo.filename - else: - zipinfo = HermeticZipInfo(filename=zip_path, date_time=date_time) - - _CheckZipPath(zip_path) - - if src_path and os.path.islink(src_path): - zipinfo.filename = zip_path - zipinfo.external_attr |= stat.S_IFLNK << 16 # mark as a symlink - zip_file.writestr(zipinfo, os.readlink(src_path)) - return - - # zipfile.write() does - # external_attr = (os.stat(src_path)[0] & 0xFFFF) << 16 - # but we want to use _HERMETIC_FILE_ATTR, so manually set - # the few attr bits we care about. - if src_path: - st = os.stat(src_path) - for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH): - if st.st_mode & mode: - zipinfo.external_attr |= mode << 16 - - if src_path: - with open(src_path, 'rb') as f: - data = f.read() - - # zipfile will deflate even when it makes the file bigger. To avoid - # growing files, disable compression at an arbitrary cut off point. - if len(data) < 16: - compress = False - - # None converts to ZIP_STORED, when passed explicitly rather than the - # default passed to the ZipFile constructor. - compress_type = zip_file.compression - if compress is not None: - compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED - zip_file.writestr(zipinfo, data, compress_type) - - -def DoZip(inputs, - output, - base_dir=None, - compress_fn=None, - zip_prefix_path=None, - timestamp=None): - """Creates a zip file from a list of files. - - Args: - inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples. - output: Path, fileobj, or ZipFile instance to add files to. - base_dir: Prefix to strip from inputs. - compress_fn: Applied to each input to determine whether or not to compress. - By default, items will be |zipfile.ZIP_STORED|. - zip_prefix_path: Path prepended to file path in zip file. - timestamp: Unix timestamp to use for files in the archive. - """ - if base_dir is None: - base_dir = '.' - input_tuples = [] - for tup in inputs: - if isinstance(tup, string_types): - tup = (os.path.relpath(tup, base_dir), tup) - if tup[0].startswith('..'): - raise Exception('Invalid zip_path: ' + tup[0]) - input_tuples.append(tup) - - # Sort by zip path to ensure stable zip ordering. 
- input_tuples.sort(key=lambda tup: tup[0]) - - out_zip = output - if not isinstance(output, zipfile.ZipFile): - out_zip = zipfile.ZipFile(output, 'w') - - date_time = HermeticDateTime(timestamp) - try: - for zip_path, fs_path in input_tuples: - if zip_prefix_path: - zip_path = os.path.join(zip_prefix_path, zip_path) - compress = compress_fn(zip_path) if compress_fn else None - AddToZipHermetic(out_zip, - zip_path, - src_path=fs_path, - compress=compress, - date_time=date_time) - finally: - if output is not out_zip: - out_zip.close() - - -def ZipDir(output, base_dir, compress_fn=None, zip_prefix_path=None): - """Creates a zip file from a directory.""" - inputs = [] - for root, _, files in os.walk(base_dir): - for f in files: - inputs.append(os.path.join(root, f)) - - if isinstance(output, zipfile.ZipFile): - DoZip( - inputs, - output, - base_dir, - compress_fn=compress_fn, - zip_prefix_path=zip_prefix_path) - else: - with AtomicOutput(output) as f: - DoZip( - inputs, - f, - base_dir, - compress_fn=compress_fn, - zip_prefix_path=zip_prefix_path) - - def MatchesGlob(path, filters): """Returns whether the given path matches any of the given glob patterns.""" return filters and any(fnmatch.fnmatch(path, f) for f in filters) @@ -560,12 +345,14 @@ def MergeZips(output, input_zips, path_transform=None, compress=None): compress: Overrides compression setting from origin zip entries. """ path_transform = path_transform or (lambda p: p) - added_names = set() out_zip = output if not isinstance(output, zipfile.ZipFile): out_zip = zipfile.ZipFile(output, 'w') + # Include paths in the existing zip here to avoid adding duplicate files. + added_names = set(out_zip.namelist()) + try: for in_file in input_zips: with zipfile.ZipFile(in_file, 'r') as in_zip: @@ -637,29 +424,6 @@ def log_exit(): atexit.register(log_exit) -def AddDepfileOption(parser): - # TODO(agrieve): Get rid of this once we've moved to argparse. - if hasattr(parser, 'add_option'): - func = parser.add_option - else: - func = parser.add_argument - func('--depfile', - help='Path to depfile (refer to `gn help depfile`)') - - -def WriteDepfile(depfile_path, first_gn_output, inputs=None): - assert depfile_path != first_gn_output # http://crbug.com/646165 - assert not isinstance(inputs, string_types) # Easy mistake to make - inputs = inputs or [] - MakeDirectory(os.path.dirname(depfile_path)) - # Ninja does not support multiple outputs in depfiles. - with open(depfile_path, 'w') as depfile: - depfile.write(first_gn_output.replace(' ', '\\ ')) - depfile.write(': ') - depfile.write(' '.join(i.replace(' ', '\\ ') for i in inputs)) - depfile.write('\n') - - def ExpandFileArgs(args): """Replaces file-arg placeholders in args. @@ -704,7 +468,7 @@ def get_key(key): raise Exception('Expected single item list but got %s' % expansion) expansion = expansion[0] - # This should match ParseGnList. The output is either a GN-formatted list + # This should match parse_gn_list. The output is either a GN-formatted list # or a literal (with no quotes). if isinstance(expansion, list): new_args[i] = (arg[:match.start()] + gn_helpers.ToGNString(expansion) + diff --git a/build/android/gyp/util/build_utils_test.py b/build/android/gyp/util/build_utils_test.py index 008ea11748cb..44528c9215de 100755 --- a/build/android/gyp/util/build_utils_test.py +++ b/build/android/gyp/util/build_utils_test.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2018 The Chromium Authors. All rights reserved. 
+# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/gyp/util/diff_utils.py b/build/android/gyp/util/diff_utils.py index 530a6881916b..445bbe3d21be 100644 --- a/build/android/gyp/util/diff_utils.py +++ b/build/android/gyp/util/diff_utils.py @@ -1,12 +1,13 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +import difflib import os import sys -import difflib from util import build_utils +import action_helpers # build_utils adds //build to sys.path. def _SkipOmitted(line): @@ -34,7 +35,15 @@ def _GenerateDiffWithOnlyAdditons(expected_path, actual_data): '{}\n'.format(l.rstrip()) for l in actual_data.splitlines() if l.strip() ] - diff = difflib.ndiff(expected_lines, actual_lines) + # This helps the diff to not over-anchor on comments or closing braces in + # proguard configs. + def is_junk_line(l): + l = l.strip() + if l.startswith('# File:'): + return False + return l == '' or l == '}' or l.startswith('#') + + diff = difflib.ndiff(expected_lines, actual_lines, linejunk=is_junk_line) filtered_diff = (l for l in diff if l.startswith('+')) return ''.join(filtered_diff) @@ -88,7 +97,7 @@ def AddCommandLineFlags(parser): def CheckExpectations(actual_data, options, custom_msg=''): if options.actual_file: - with build_utils.AtomicOutput(options.actual_file) as f: + with action_helpers.atomic_output(options.actual_file) as f: f.write(actual_data.encode('utf8')) if options.expected_file_base: actual_data = _GenerateDiffWithOnlyAdditons(options.expected_file_base, diff --git a/build/android/gyp/util/jar_info_utils.py b/build/android/gyp/util/jar_info_utils.py index 975945510e32..3a895c2a81a3 100644 --- a/build/android/gyp/util/jar_info_utils.py +++ b/build/android/gyp/util/jar_info_utils.py @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/gyp/util/java_cpp_utils.py b/build/android/gyp/util/java_cpp_utils.py index 5180400d6161..46f05f66241b 100644 --- a/build/android/gyp/util/java_cpp_utils.py +++ b/build/android/gyp/util/java_cpp_utils.py @@ -1,4 +1,4 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -38,7 +38,7 @@ def KCamelToShouty(s): return s.upper() -class JavaString(object): +class JavaString: def __init__(self, name, value, comments): self.name = KCamelToShouty(name) self.value = value @@ -67,7 +67,7 @@ def ParseTemplateFile(lines): # TODO(crbug.com/937282): Work will be needed if we want to annotate specific # constants in the file to be parsed. -class CppConstantParser(object): +class CppConstantParser: """Parses C++ constants, retaining their comments. 
The Delegate subclass is responsible for matching and extracting the @@ -76,7 +76,7 @@ class CppConstantParser(object): """ SINGLE_LINE_COMMENT_RE = re.compile(r'\s*(// [^\n]*)') - class Delegate(object): + class Delegate: def ExtractConstantName(self, line): """Extracts a constant's name from line or None if not a match.""" raise NotImplementedError() @@ -149,9 +149,8 @@ def _ParseComment(self, line): self._in_comment = True self._in_variable = True return True - else: - self._in_comment = False - return False + self._in_comment = False + return False def _ParseVariable(self, line): current_name = self._delegate.ExtractConstantName(line) @@ -164,9 +163,8 @@ def _ParseVariable(self, line): else: self._in_variable = True return True - else: - self._in_variable = False - return False + self._in_variable = False + return False def _ParseLine(self, line): if not self._in_variable: diff --git a/build/android/gyp/util/manifest_utils.py b/build/android/gyp/util/manifest_utils.py index a517708b5979..3202058b616b 100644 --- a/build/android/gyp/util/manifest_utils.py +++ b/build/android/gyp/util/manifest_utils.py @@ -1,4 +1,4 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -10,9 +10,10 @@ import shlex import sys import xml.dom.minidom as minidom +from xml.etree import ElementTree from util import build_utils -from xml.etree import ElementTree +import action_helpers # build_utils adds //build to sys.path. ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android' TOOLS_NAMESPACE = 'http://schemas.android.com/tools' @@ -45,6 +46,14 @@ def _RegisterElementTreeNamespaces(): ElementTree.register_namespace('dist', DIST_NAMESPACE) +def NamespacedGet(node, key): + return node.get('{%s}%s' % (ANDROID_NAMESPACE, key)) + + +def NamespacedSet(node, key, value): + node.set('{%s}%s' % (ANDROID_NAMESPACE, key), value) + + def ParseManifest(path): """Parses an AndroidManifest.xml using ElementTree. @@ -63,6 +72,7 @@ def ParseManifest(path): manifest_node = doc.getroot() else: manifest_node = doc.find('manifest') + assert manifest_node is not None, 'Manifest is none for path ' + path app_node = doc.find('application') if app_node is None: @@ -72,7 +82,7 @@ def ParseManifest(path): def SaveManifest(doc, path): - with build_utils.AtomicOutput(path) as f: + with action_helpers.atomic_output(path) as f: f.write(ElementTree.tostring(doc.getroot(), encoding='UTF-8')) @@ -80,47 +90,27 @@ def GetPackage(manifest_node): return manifest_node.get('package') -def AssertUsesSdk(manifest_node, - min_sdk_version=None, - target_sdk_version=None, - max_sdk_version=None, - fail_if_not_exist=False): - """Asserts values of attributes of element. - - Unless |fail_if_not_exist| is true, will only assert if both the passed value - is not None and the value of attribute exist. If |fail_if_not_exist| is true - will fail if passed value is not None but attribute does not exist. 
- """ +def SetUsesSdk(manifest_node, + target_sdk_version, + min_sdk_version, + max_sdk_version=None): uses_sdk_node = manifest_node.find('./uses-sdk') if uses_sdk_node is None: - return - for prefix, sdk_version in (('min', min_sdk_version), ('target', - target_sdk_version), - ('max', max_sdk_version)): - value = uses_sdk_node.get('{%s}%sSdkVersion' % (ANDROID_NAMESPACE, prefix)) - if fail_if_not_exist and not value and sdk_version: - assert False, ( - '%sSdkVersion in Android manifest does not exist but we expect %s' % - (prefix, sdk_version)) - if not value or not sdk_version: - continue - assert value == sdk_version, ( - '%sSdkVersion in Android manifest is %s but we expect %s' % - (prefix, value, sdk_version)) + uses_sdk_node = ElementTree.SubElement(manifest_node, 'uses-sdk') + NamespacedSet(uses_sdk_node, 'targetSdkVersion', target_sdk_version) + NamespacedSet(uses_sdk_node, 'minSdkVersion', min_sdk_version) + if max_sdk_version: + NamespacedSet(uses_sdk_node, 'maxSdkVersion', max_sdk_version) -def AssertPackage(manifest_node, package): - """Asserts that manifest package has desired value. - - Will only assert if both |package| is not None and the package is set in the - manifest. - """ - package_value = GetPackage(manifest_node) - if package_value is None or package is None: - return - assert package_value == package, ( - 'Package in Android manifest is %s but we expect %s' % (package_value, - package)) +def SetTargetApiIfUnset(manifest_node, target_sdk_version): + uses_sdk_node = manifest_node.find('./uses-sdk') + if uses_sdk_node is None: + uses_sdk_node = ElementTree.SubElement(manifest_node, 'uses-sdk') + curr_target_sdk_version = NamespacedGet(uses_sdk_node, 'targetSdkVersion') + if curr_target_sdk_version is None: + NamespacedSet(uses_sdk_node, 'targetSdkVersion', target_sdk_version) + return curr_target_sdk_version is None def _SortAndStripElementTree(root): @@ -195,7 +185,7 @@ def _CreateNodeHash(lines): if cur_indent != -1 and cur_indent <= target_indent: tag_lines = lines[:i + 1] break - elif not tag_closed and 'android:name="' in l: + if not tag_closed and 'android:name="' in l: # To reduce noise of node tags changing, use android:name as the # basis the hash since they usually unique. tag_lines = [l] @@ -214,7 +204,7 @@ def _IsSelfClosing(lines): idx = l.find('>') if idx != -1: return l[idx - 1] == '/' - assert False, 'Did not find end of tag:\n' + '\n'.join(lines) + raise RuntimeError('Did not find end of tag:\n%s' % '\n'.join(lines)) def _AddDiffTags(lines): @@ -251,7 +241,8 @@ def _AddDiffTags(lines): assert not hash_stack, 'hash_stack was not empty:\n' + '\n'.join(hash_stack) -def NormalizeManifest(manifest_contents): +def NormalizeManifest(manifest_contents, version_code_offset, + library_version_offset): _RegisterElementTreeNamespaces() # This also strips comments and sorts node attributes alphabetically. root = ElementTree.fromstring(manifest_contents) @@ -266,14 +257,24 @@ def NormalizeManifest(manifest_contents): if debuggable_name in app_node.attrib: del app_node.attrib[debuggable_name] + version_code = NamespacedGet(root, 'versionCode') + if version_code and version_code_offset: + version_code = int(version_code) - int(version_code_offset) + NamespacedSet(root, 'versionCode', f'OFFSET={version_code}') + version_name = NamespacedGet(root, 'versionName') + if version_name: + version_name = re.sub(r'\d+', '#', version_name) + NamespacedSet(root, 'versionName', version_name) + # Trichrome's static library version number is updated daily. 
To avoid # frequent manifest check failures, we remove the exact version number # during normalization. for node in app_node: - if (node.tag in ['uses-static-library', 'static-library'] - and '{%s}version' % ANDROID_NAMESPACE in node.keys() - and '{%s}name' % ANDROID_NAMESPACE in node.keys()): - node.set('{%s}version' % ANDROID_NAMESPACE, '$VERSION_NUMBER') + if node.tag in ['uses-static-library', 'static-library']: + version = NamespacedGet(node, 'version') + if version and library_version_offset: + version = int(version) - int(library_version_offset) + NamespacedSet(node, 'version', f'OFFSET={version}') # We also remove the exact package name (except the one at the root level) # to avoid noise during manifest comparison. diff --git a/build/android/gyp/util/manifest_utils_test.py b/build/android/gyp/util/manifest_utils_test.py index 52bf458a59f3..165df4c5575b 100755 --- a/build/android/gyp/util/manifest_utils_test.py +++ b/build/android/gyp/util/manifest_utils_test.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -14,6 +14,8 @@ _TEST_MANIFEST = """\ @@ -52,6 +54,8 @@ xmlns:android="http://schemas.android.com/apk/res/android" xmlns:tools="http://schemas.android.com/tools" package="test.pkg" + android:versionCode="OFFSET=4" + android:versionName="#.#.#.#" tools:ignore="MissingVersion"> @@ -106,19 +110,19 @@ class ManifestUtilsTest(unittest.TestCase): def testNormalizeManifest_golden(self): test_manifest, expected = _CreateTestData() - actual = manifest_utils.NormalizeManifest(test_manifest) + actual = manifest_utils.NormalizeManifest(test_manifest, 1230, None) self.assertMultiLineEqual(expected, actual) def testNormalizeManifest_nameUsedForActivity(self): test_manifest, expected = _CreateTestData(extra_activity_attr='a="b"') - actual = manifest_utils.NormalizeManifest(test_manifest) + actual = manifest_utils.NormalizeManifest(test_manifest, 1230, None) # Checks that the DIFF-ANCHOR does not change with the added attribute. self.assertMultiLineEqual(expected, actual) def testNormalizeManifest_nameNotUsedForIntentFilter(self): test_manifest, expected = _CreateTestData( extra_intent_filter_elem='', intent_filter_diff_anchor='5f5c8a70') - actual = manifest_utils.NormalizeManifest(test_manifest) + actual = manifest_utils.NormalizeManifest(test_manifest, 1230, None) # Checks that the DIFF-ANCHOR does change with the added element despite # having a nested element with an android:name set. self.assertMultiLineEqual(expected, actual) diff --git a/build/android/gyp/util/md5_check.py b/build/android/gyp/util/md5_check.py index 87ee723c8546..269ae284076c 100644 --- a/build/android/gyp/util/md5_check.py +++ b/build/android/gyp/util/md5_check.py @@ -1,8 +1,7 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import difflib import hashlib @@ -13,8 +12,7 @@ import zipfile from util import build_utils - -sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build')) +import action_helpers # build_utils adds //build to sys.path. import print_python_deps # When set and a difference is detected, a diff of what changed is printed. 
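The action_helpers.write_depfile call that replaces build_utils.WriteDepfile
in the next hunk emits the Makefile-style dependency format that Ninja
consumes. A minimal sketch mirroring the removed helper: a single output
(Ninja does not support more per depfile) with spaces escaped:

import os

def write_depfile_sketch(depfile_path, first_gn_output, inputs):
  # Format: "<output>: <input1> <input2>\n". All other outputs piggyback on
  # the first GN output, since Ninja allows only one per depfile.
  os.makedirs(os.path.dirname(depfile_path) or '.', exist_ok=True)
  esc = lambda p: p.replace(' ', '\\ ')
  with open(depfile_path, 'w') as f:
    f.write('%s: %s\n' % (esc(first_gn_output),
                          ' '.join(esc(i) for i in inputs)))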
@@ -67,7 +65,7 @@ def CallAndWriteDepfileIfStale(on_stale_md5, # on bots that build with & without patch, and the patch changes the depfile # location. if hasattr(options, 'depfile') and options.depfile: - build_utils.WriteDepfile(options.depfile, output_paths[0], depfile_deps) + action_helpers.write_depfile(options.depfile, output_paths[0], depfile_deps) def CallAndRecordIfStale(function, @@ -158,7 +156,7 @@ def CallAndRecordIfStale(function, new_metadata.ToFile(f) -class Changes(object): +class Changes: """Provides and API for querying what changed between runs.""" def __init__(self, old_metadata, new_metadata, force, missing_outputs, @@ -262,11 +260,11 @@ def DescribeDifference(self): """Returns a human-readable description of what changed.""" if self.force: return 'force=True' - elif self.missing_outputs: + if self.missing_outputs: return 'Outputs do not exist:\n ' + '\n '.join(self.missing_outputs) - elif self.too_new: + if self.too_new: return 'Outputs newer than stamp file:\n ' + '\n '.join(self.too_new) - elif self.old_metadata is None: + if self.old_metadata is None: return 'Previous stamp file not found.' if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5(): @@ -294,7 +292,7 @@ def DescribeDifference(self): return 'I have no idea what changed (there is a bug).' -class _Metadata(object): +class _Metadata: """Data model for tracking change metadata. Args: diff --git a/build/android/gyp/util/md5_check_test.py b/build/android/gyp/util/md5_check_test.py index e11bbd50edcf..e1e940b4da31 100755 --- a/build/android/gyp/util/md5_check_test.py +++ b/build/android/gyp/util/md5_check_test.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/gyp/util/parallel.py b/build/android/gyp/util/parallel.py index c26875a71ca0..dec94c7a329c 100644 --- a/build/android/gyp/util/parallel.py +++ b/build/android/gyp/util/parallel.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Helpers related to multiprocessing. @@ -27,7 +27,7 @@ _fork_kwargs = None -class _ImmediateResult(object): +class _ImmediateResult: def __init__(self, value): self._value = value @@ -44,7 +44,7 @@ def successful(self): return True -class _ExceptionWrapper(object): +class _ExceptionWrapper: """Used to marshal exception messages back to main process.""" def __init__(self, msg, exception_type=None): @@ -57,7 +57,7 @@ def MaybeThrow(self): self.exception_type)('Originally caused by: ' + self.msg) -class _FuncWrapper(object): +class _FuncWrapper: """Runs on the fork()'ed side to catch exceptions and spread *args.""" def __init__(self, func): @@ -66,7 +66,10 @@ def __init__(self, func): self._func = func def __call__(self, index, _=None): + global _fork_kwargs try: + if _fork_kwargs is None: # Clarifies _fork_kwargs is map for pylint. + _fork_kwargs = {} return self._func(*_fork_params[index], **_fork_kwargs) except Exception as e: # Only keep the exception type for builtin exception types or else risk @@ -81,7 +84,7 @@ def __call__(self, index, _=None): return _ExceptionWrapper(traceback.format_exc()) -class _WrappedResult(object): +class _WrappedResult: """Allows for host-side logic to be run after child process has terminated. 
* Unregisters associated pool _all_pools. diff --git a/build/android/gyp/util/protoresources.py b/build/android/gyp/util/protoresources.py index 272574f1174c..11f877806612 100644 --- a/build/android/gyp/util/protoresources.py +++ b/build/android/gyp/util/protoresources.py @@ -1,10 +1,10 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Functions that modify resources in protobuf format. Format reference: -https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/Resources.proto +https://cs.android.com/search?q=f:aapt2.*Resources.proto """ import logging @@ -211,7 +211,7 @@ def process_func(filename, data): _ProcessZip(zip_path, process_func) -class _ResourceStripper(object): +class _ResourceStripper: def __init__(self, partial_path, keep_predicate): self.partial_path = partial_path self.keep_predicate = keep_predicate @@ -231,12 +231,12 @@ def _StripStyles(self, entry, type_and_name): for style in self._IterStyles(entry): entries = style.entry new_entries = [] - for entry in entries: - full_name = '{}/{}'.format(type_and_name, entry.key.name) + for e in entries: + full_name = '{}/{}'.format(type_and_name, e.key.name) if not self.keep_predicate(full_name): logging.debug('Stripped %s/%s', self.partial_path, full_name) else: - new_entries.append(entry) + new_entries.append(e) if len(new_entries) != len(entries): self._has_changes = True @@ -267,7 +267,7 @@ def StripTable(self, table): def _TableFromFlatBytes(data): - # https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/format/Container.cpp + # https://cs.android.com/search?q=f:aapt2.*Container.cpp size_idx = len(_FLAT_ARSC_HEADER) proto_idx = size_idx + 8 if data[:size_idx] != _FLAT_ARSC_HEADER: diff --git a/build/android/gyp/util/resource_utils.py b/build/android/gyp/util/resource_utils.py index 263b7c23484c..dac0ae7588be 100644 --- a/build/android/gyp/util/resource_utils.py +++ b/build/android/gyp/util/resource_utils.py @@ -1,8 +1,7 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -import argparse import collections import contextlib import itertools @@ -43,11 +42,11 @@ 'no': 'nb', # 'no' is not a real language. 
http://crbug.com/920960 } -_ALL_RESOURCE_TYPES = { +ALL_RESOURCE_TYPES = { 'anim', 'animator', 'array', 'attr', 'bool', 'color', 'dimen', 'drawable', - 'font', 'fraction', 'id', 'integer', 'interpolator', 'layout', 'menu', - 'mipmap', 'plurals', 'raw', 'string', 'style', 'styleable', 'transition', - 'xml' + 'font', 'fraction', 'id', 'integer', 'interpolator', 'layout', 'macro', + 'menu', 'mipmap', 'plurals', 'raw', 'string', 'style', 'styleable', + 'transition', 'xml' } AAPT_IGNORE_PATTERN = ':'.join([ @@ -238,7 +237,7 @@ def IterResourceFilesInDirectories(directories, yield path, archive_path -class ResourceInfoFile(object): +class ResourceInfoFile: """Helper for building up .res.info files.""" def __init__(self): @@ -348,6 +347,39 @@ def _FixPackageIds(resource_value): return resource_value.replace('0x00', '0x7f') +def ResolveStyleableReferences(r_txt_path): + # Convert lines like: + # int[] styleable ViewBack { 0x010100d4, com.android.webview.R.attr.backTint } + # to: + # int[] styleable ViewBack { 0x010100d4, 0xREALVALUE } + entries = _ParseTextSymbolsFile(r_txt_path) + lookup_table = {(e.resource_type, e.name): e.value for e in entries} + + sb = [] + with open(r_txt_path, encoding='utf8') as f: + for l in f: + if l.startswith('int[] styleable'): + brace_start = l.index('{') + 2 + brace_end = l.index('}') - 1 + values = [x for x in l[brace_start:brace_end].split(', ') if x] + new_values = [] + for v in values: + try: + if not v.startswith('0x'): + resource_type, name = v.split('.')[-2:] + new_values.append(lookup_table[(resource_type, name)]) + else: + new_values.append(v) + except: + logging.warning('Failed line: %r %r', l, v) + raise + l = l[:brace_start] + ', '.join(new_values) + l[brace_end:] + sb.append(l) + + with open(r_txt_path, 'w', encoding='utf8') as f: + f.writelines(sb) + + def _GetRTxtResourceNames(r_txt_path): """Parse an R.txt file and extract the set of resource names from it.""" return {entry.name for entry in _ParseTextSymbolsFile(r_txt_path)} @@ -486,15 +518,14 @@ def _IsResourceFinal(self, entry): if entry.resource_type == 'styleable' and entry.java_type != 'int[]': # A styleable constant may be exported as non-final after all. return not self.export_const_styleable - elif not self.has_constant_ids: + if not self.has_constant_ids: # Every resource is non-final return False - elif not self.resources_allowlist: + if not self.resources_allowlist: # No allowlist means all IDs are non-final. return True - else: - # Otherwise, only those in the - return entry.name not in self.resources_allowlist + # Otherwise, only those in the + return entry.name not in self.resources_allowlist def CreateRJavaFiles(srcjar_dir, @@ -505,7 +536,6 @@ def CreateRJavaFiles(srcjar_dir, srcjar_out, custom_root_package_name=None, grandparent_custom_package_name=None, - extra_main_r_text_files=None, ignore_mismatched_values=False): """Create all R.java files for a set of packages and R.txt files. @@ -526,7 +556,6 @@ def CreateRJavaFiles(srcjar_dir, as the grandparent_custom_package_name. The format of this package name is identical to custom_root_package_name. (eg. for vr grandparent_custom_package_name would be "base") - extra_main_r_text_files: R.txt files to be added to the root R.java file. ignore_mismatched_values: If True, ignores if a resource appears multiple times with different entry values (useful when all the values are dummy anyways). 
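ResolveStyleableReferences above and the CreateRJavaFiles changes below both
lean on the four-column R.txt format. A simplified parser sketch matching the
_TextSymbolEntry fields used by these helpers; the real _ParseTextSymbolsFile
also understands the brace-delimited int[] styleable arrays shown in the
ResolveStyleableReferences comment:

import collections

RTextEntry = collections.namedtuple(
    'RTextEntry', ('java_type', 'resource_type', 'name', 'value'))

def parse_r_txt_sketch(r_txt_path):
  # Example line: "int attr line_height 0x7f0014ee"
  entries = []
  with open(r_txt_path, encoding='utf8') as f:
    for line in f:
      java_type, resource_type, name, value = line.split(' ', 3)
      entries.append(RTextEntry(java_type, resource_type, name, value.strip()))
  return entries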
@@ -548,8 +577,6 @@ def CreateRJavaFiles(srcjar_dir, all_resources_by_type = collections.defaultdict(list) main_r_text_files = [main_r_txt_file] - if extra_main_r_text_files: - main_r_text_files.extend(extra_main_r_text_files) for r_txt_file in main_r_text_files: for entry in _ParseTextSymbolsFile(r_txt_file, fix_package_ids=True): entry_key = (entry.resource_type, entry.name) @@ -562,8 +589,8 @@ def CreateRJavaFiles(srcjar_dir, else: all_resources[entry_key] = entry all_resources_by_type[entry.resource_type].append(entry) - assert entry.resource_type in _ALL_RESOURCE_TYPES, ( - 'Unknown resource type: %s, add to _ALL_RESOURCE_TYPES!' % + assert entry.resource_type in ALL_RESOURCE_TYPES, ( + 'Unknown resource type: %s, add to ALL_RESOURCE_TYPES!' % entry.resource_type) if custom_root_package_name: @@ -583,8 +610,8 @@ def CreateRJavaFiles(srcjar_dir, with open(root_r_java_path, 'w') as f: f.write(root_java_file_contents) - for package in packages: - _CreateRJavaSourceFile(srcjar_dir, package, root_r_java_package, + for p in packages: + _CreateRJavaSourceFile(srcjar_dir, p, root_r_java_package, rjava_build_options) @@ -639,7 +666,7 @@ def _RenderRJavaSource(package, root_r_java_package, rjava_build_options): return template.render( package=package, - resource_types=sorted(_ALL_RESOURCE_TYPES), + resource_types=sorted(ALL_RESOURCE_TYPES), root_package=root_r_java_package, has_on_resources_loaded=rjava_build_options.has_on_resources_loaded) @@ -662,14 +689,6 @@ def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options, else: non_final_resources_by_type[res_type].append(entry) - # Keep these assignments all on one line to make diffing against regular - # aapt-generated files easier. - create_id = ('{{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;') - create_id_arr = ('{{ e.resource_type }}.{{ e.name }}[i] ^=' - ' packageIdTransform;') - for_loop_condition = ('int i = {{ startIndex(e) }}; i < ' - '{{ e.resource_type }}.{{ e.name }}.length; ++i') - # Here we diverge from what aapt does. Because we have so many # resources, the onResourcesLoaded method was exceeding the 64KB limit that # Java imposes. For this reason we split onResourcesLoaded into different @@ -680,6 +699,10 @@ def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options, extends_string = 'extends {{ parent_path }}.R.{{ resource_type }} ' dep_path = GetCustomPackagePath(grandparent_custom_package_name) + # Don't actually mark fields as "final" or else R8 complain when aapt2 uses + # --proguard-conditional-keep-rules. E.g.: + # Rule precondition matches static final fields javac has inlined. + # Such rules are unsound as the shrinker cannot infer the inlining precisely. template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. 
*/ package {{ package }}; @@ -688,7 +711,7 @@ def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options, {% for resource_type in resource_types %} public static class {{ resource_type }} """ + extends_string + """ { {% for e in final_resources[resource_type] %} - public static final {{ e.java_type }} {{ e.name }} = {{ e.value }}; + public static {{ e.java_type }} {{ e.name }} = {{ e.value }}; {% endfor %} {% for e in non_final_resources[resource_type] %} {% if e.value != '0' %} @@ -705,29 +728,44 @@ def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options, } {% else %} private static boolean sResourcesDidLoad; + + private static void patchArray( + int[] arr, int startIndex, int packageIdTransform) { + for (int i = startIndex; i < arr.length; ++i) { + arr[i] ^= packageIdTransform; + } + } + public static void onResourcesLoaded(int packageId) { if (sResourcesDidLoad) { return; } sResourcesDidLoad = true; int packageIdTransform = (packageId ^ 0x7f) << 24; + {# aapt2 makes int[] resources refer to other resources by reference + rather than by value. Thus, need to transform the int[] resources + first, before the referenced resources are transformed in order to + ensure the transform applies exactly once. + See https://crbug.com/1237059 for context. + #} {% for resource_type in resource_types %} - onResourcesLoaded{{ resource_type|title }}(packageIdTransform); {% for e in non_final_resources[resource_type] %} {% if e.java_type == 'int[]' %} - for(""" + for_loop_condition + """) { - """ + create_id_arr + """ - } + patchArray({{ e.resource_type }}.{{ e.name }}, {{ startIndex(e) }}, \ +packageIdTransform); {% endif %} {% endfor %} {% endfor %} + {% for resource_type in resource_types %} + onResourcesLoaded{{ resource_type|title }}(packageIdTransform); + {% endfor %} } {% for res_type in resource_types %} private static void onResourcesLoaded{{ res_type|title }} ( int packageIdTransform) { {% for e in non_final_resources[res_type] %} {% if res_type != 'styleable' and e.java_type != 'int[]' %} - """ + create_id + """ + {{ e.resource_type }}.{{ e.name }} ^= packageIdTransform; {% endif %} {% endfor %} } @@ -740,7 +778,7 @@ def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options, lstrip_blocks=True) return template.render( package=package, - resource_types=sorted(_ALL_RESOURCE_TYPES), + resource_types=sorted(ALL_RESOURCE_TYPES), has_on_resources_loaded=rjava_build_options.has_on_resources_loaded, fake_on_resources_loaded=rjava_build_options.fake_on_resources_loaded, final_resources=final_resources_by_type, @@ -761,7 +799,14 @@ def ExtractBinaryManifestValues(aapt2_path, apk_path): def ExtractArscPackage(aapt2_path, apk_path): - """Returns (package_name, package_id) of resources.arsc from apk_path.""" + """Returns (package_name, package_id) of resources.arsc from apk_path. + + When the apk does not have any entries in its resources file, in recent aapt2 + versions it will not contain a "Package" line. The package is not even in the + actual resources.arsc/resources.pb file (which itself is mostly empty). Thus + return (None, None) when dump succeeds and there are no errors to indicate + that the package name does not exist in the resources file. 
+ """ proc = subprocess.Popen([aapt2_path, 'dump', 'resources', apk_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -777,8 +822,11 @@ def ExtractArscPackage(aapt2_path, apk_path): # aapt2 currently crashes when dumping webview resources, but not until after # it prints the "Package" line (b/130553900). - sys.stderr.write(proc.stderr.read()) - raise Exception('Failed to find arsc package name') + stderr_output = proc.stderr.read().decode('utf-8') + if stderr_output: + sys.stderr.write(stderr_output) + raise Exception('Failed to find arsc package name') + return None, None def _RenameSubdirsWithPrefix(dir_path, prefix): @@ -840,7 +888,7 @@ def ExtractDeps(dep_zips, deps_dir): return dep_subdirs -class _ResourceBuildContext(object): +class _ResourceBuildContext: """A temporary directory for packaging and compiling Android resources. Args: @@ -898,65 +946,6 @@ def BuildContext(temp_dir=None, keep_files=False): context.Close() -def ResourceArgsParser(): - """Create an argparse.ArgumentParser instance with common argument groups. - - Returns: - A tuple of (parser, in_group, out_group) corresponding to the parser - instance, and the input and output argument groups for it, respectively. - """ - parser = argparse.ArgumentParser(description=__doc__) - - input_opts = parser.add_argument_group('Input options') - output_opts = parser.add_argument_group('Output options') - - build_utils.AddDepfileOption(output_opts) - - input_opts.add_argument('--include-resources', required=True, action="append", - help='Paths to arsc resource files used to link ' - 'against. Can be specified multiple times.') - - input_opts.add_argument('--dependencies-res-zips', required=True, - help='Resources zip archives from dependents. Required to ' - 'resolve @type/foo references into dependent ' - 'libraries.') - - input_opts.add_argument( - '--r-text-in', - help='Path to pre-existing R.txt. Its resource IDs override those found ' - 'in the aapt-generated R.txt when generating R.java.') - - input_opts.add_argument( - '--extra-res-packages', - help='Additional package names to generate R.java files for.') - - return (parser, input_opts, output_opts) - - -def HandleCommonOptions(options): - """Handle common command-line options after parsing. - - Args: - options: the result of parse_args() on the parser returned by - ResourceArgsParser(). This function updates a few common fields. - """ - options.include_resources = [build_utils.ParseGnList(r) for r in - options.include_resources] - # Flatten list of include resources list to make it easier to use. - options.include_resources = [r for resources in options.include_resources - for r in resources] - - options.dependencies_res_zips = ( - build_utils.ParseGnList(options.dependencies_res_zips)) - - # Don't use [] as default value since some script explicitly pass "". - if options.extra_res_packages: - options.extra_res_packages = ( - build_utils.ParseGnList(options.extra_res_packages)) - else: - options.extra_res_packages = [] - - def ParseAndroidResourceStringsFromXml(xml_data): """Parse and Android xml resource file and extract strings from it. 
@@ -1005,7 +994,7 @@ def ParseAndroidResourceStringsFromXml(xml_data): raise Exception('Expected closing string tag: ' + input_data) text = input_data[:m2.start()] input_data = input_data[m2.end():] - if len(text) and text[0] == '"' and text[-1] == '"': + if len(text) != 0 and text[0] == '"' and text[-1] == '"': text = text[1:-1] result[name] = text diff --git a/build/android/gyp/util/resource_utils_test.py b/build/android/gyp/util/resource_utils_test.py index 62d5b431e9d1..4b31e9257866 100755 --- a/build/android/gyp/util/resource_utils_test.py +++ b/build/android/gyp/util/resource_utils_test.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # coding: utf-8 -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -19,7 +19,7 @@ os.path.join(os.path.dirname(__file__), os.pardir)) sys.path.insert(1, _BUILD_ANDROID_GYP_ROOT) -import resource_utils # pylint: disable=relative-import +import resource_utils # pylint: disable=line-too-long diff --git a/build/android/gyp/util/resources_parser.py b/build/android/gyp/util/resources_parser.py index 8d8d69cce873..86d85407d3b4 100644 --- a/build/android/gyp/util/resources_parser.py +++ b/build/android/gyp/util/resources_parser.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,6 +9,7 @@ from util import build_utils from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. _TextSymbolEntry = collections.namedtuple( 'RTextEntry', ('java_type', 'resource_type', 'name', 'value')) @@ -21,7 +22,7 @@ def _ResourceNameToJavaSymbol(resource_name): return re.sub('[\.:]', '_', resource_name) -class RTxtGenerator(object): +class RTxtGenerator: def __init__(self, res_dirs, ignore_pattern=resource_utils.AAPT_IGNORE_PATTERN): @@ -73,13 +74,19 @@ def _ExtractNewIdsFromNode(self, node): ret.update(self._ExtractNewIdsFromNode(child)) return ret + def _ParseXml(self, xml_path): + try: + return ElementTree.parse(xml_path).getroot() + except Exception as e: + raise RuntimeError('Failure parsing {}:\n'.format(xml_path)) from e + def _ExtractNewIdsFromXml(self, xml_path): - root = ElementTree.parse(xml_path).getroot() - return self._ExtractNewIdsFromNode(root) + return self._ExtractNewIdsFromNode(self._ParseXml(xml_path)) def _ParseValuesXml(self, xml_path): ret = set() - root = ElementTree.parse(xml_path).getroot() + root = self._ParseXml(xml_path) + assert root.tag == 'resources' for child in root: if child.tag == 'eat-comment': @@ -91,12 +98,18 @@ def _ParseValuesXml(self, xml_path): if child.tag == 'declare-styleable': ret.update(self._ParseDeclareStyleable(child)) else: - if child.tag == 'item': + if child.tag in ('item', 'public'): resource_type = child.attrib['type'] elif child.tag in ('array', 'integer-array', 'string-array'): resource_type = 'array' else: resource_type = child.tag + parsed_element = ElementTree.tostring(child, encoding='unicode').strip() + assert resource_type in resource_utils.ALL_RESOURCE_TYPES, ( + f'Infered resource type ({resource_type}) from xml entry ' + f'({parsed_element}) (found in {xml_path}) is not listed in ' + 'resource_utils.ALL_RESOURCE_TYPES. 
Teach resources_parser.py how ' + 'to parse this entry and/or add to the list.') name = _ResourceNameToJavaSymbol(child.attrib['name']) ret.add(_TextSymbolEntry('int', resource_type, name, _DUMMY_RTXT_ID)) return ret @@ -131,11 +144,11 @@ def _CollectResourcesListFromDirectories(self): ret = set() for res_dir in self.res_dirs: ret.update(self._CollectResourcesListFromDirectory(res_dir)) - return ret + return sorted(ret) def WriteRTxtFile(self, rtxt_path): resources = self._CollectResourcesListFromDirectories() - with build_utils.AtomicOutput(rtxt_path, mode='w') as f: + with action_helpers.atomic_output(rtxt_path, mode='w') as f: for resource in resources: line = '{0.java_type} {0.resource_type} {0.name} {0.value}\n'.format( resource) diff --git a/build/android/gyp/util/server_utils.py b/build/android/gyp/util/server_utils.py index e050ef65521f..b634cf978ed3 100644 --- a/build/android/gyp/util/server_utils.py +++ b/build/android/gyp/util/server_utils.py @@ -1,4 +1,4 @@ -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -13,7 +13,7 @@ BUILD_SERVER_ENV_VARIABLE = 'INVOKED_BY_BUILD_SERVER' -def MaybeRunCommand(name, argv, stamp_file): +def MaybeRunCommand(name, argv, stamp_file, force): """Returns True if the command was successfully sent to the build server.""" # When the build server runs a command, it sets this environment variable. @@ -36,6 +36,12 @@ def MaybeRunCommand(name, argv, stamp_file): # [Errno 111] Connection refused. Either the server has not been started # or the server is not currently accepting new connections. if e.errno == 111: + if force: + raise RuntimeError( + '\n\nBuild server is not running and ' + 'android_static_analysis="build_server" is set.\nPlease run ' + 'this command in a separate terminal:\n\n' + '$ build/android/fast_local_dev_server.py\n\n') from None return False raise e return True diff --git a/build/android/gyp/util/zipalign.py b/build/android/gyp/util/zipalign.py deleted file mode 100644 index c5c4ea88c61a..000000000000 --- a/build/android/gyp/util/zipalign.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import struct -import sys -import zipfile - -from util import build_utils - -_FIXED_ZIP_HEADER_LEN = 30 - - -def _PatchedDecodeExtra(self): - # Try to decode the extra field. - extra = self.extra - unpack = struct.unpack - while len(extra) >= 4: - tp, ln = unpack('<HH', extra[:4]) - if tp == 1: - if ln >= 24: - counts = unpack('<QQQ', extra[4:28]) ... diff --git a/build/android/gyp/write_build_config.py b/build/android/gyp/write_build_config.py ... Target type `android_assets`: This type corresponds to targets used to group Android assets, i.e. liberal @@ -243,11 +234,7 @@ * `deps_info['public_deps_configs']`: List of paths to the `.build_config` files of *direct* dependencies of the current target which are exposed as part of the -current target's public API. This should be a subset of -deps_info['deps_configs']. - -* `deps_info['ignore_dependency_public_deps']`: If true, 'public_deps' will not -be collected from the current target's direct deps. +current target's public API. * `deps_info['unprocessed_jar_path']`: Path to the original .jar file for this target, before any kind of processing @@ -277,17 +264,18 @@ In this case, `deps_info['unprocessed_jar_path']` will point to the source `.jar` file. Otherwise, it will point to a build-generated file. 
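Since a `.build_config.json` is plain JSON, the fields documented above can be inspected directly. A minimal sketch (hypothetical path; only keys described in this documentation) that loads one and prints its dep lists:

```python
import json

def dump_build_config(path):
  # Everything that dependents may query lives under 'deps_info'.
  with open(path) as f:
    config = json.load(f)
  deps_info = config['deps_info']
  print('type:        ', deps_info['type'])
  print('deps:        ', deps_info.get('deps_configs', []))
  print('public deps: ', deps_info.get('public_deps_configs', []))

# Hypothetical output path, for illustration only:
# dump_build_config('gen/base/base_java.build_config.json')
```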
-* `deps_info['java_sources_file']`: -Path to a single `.sources` file listing all the Java sources that were used -to generate the library (simple text format, one `.jar` path per line). +* `deps_info['target_sources_file']`: +Path to a single `.sources` file listing all the Java and Kotlin sources that +were used to generate the library (simple text format, one `.jar` path per +line). * `deps_info['lint_android_manifest']`: Path to an AndroidManifest.xml file to use for this lint target. -* `deps_info['lint_java_sources']`: -The list of all `deps_info['java_sources_file']` entries for all library +* `deps_info['lint_sources']`: +The list of all `deps_info['target_sources_file']` entries for all library dependencies that are chromium code. Note: this is a list of files, where each -file contains a list of Java source files. This is used for lint. +file contains a list of Java and Kotlin source files. This is used for lint. * `deps_info['lint_aars']`: List of all aars from transitive java dependencies. This allows lint to collect @@ -342,8 +330,18 @@ collection of all `deps_info['device_jar_path']` entries for the target and all its dependencies. +* `deps_info['all_dex_files']`: +The list of paths to all `deps_info['dex_path']` entries for all libraries +that comprise this APK. Valid only for debug builds. + +* `deps_info['preferred_dep']`: +Whether the target should be the preferred dep. This is usually the case when we +have a java_group that depends on either the public or internal dep accordingly, +and it is better to depend on the group rather than the underlying dep. Another +case is android_library_factory targets, where the factory target should be +preferred instead of the actual implementation. -## Target type `junit_binary`: +## Target type `robolectric_binary`: A target type for JUnit-specific binaries. Identical to [`java_binary`](#target_java_binary) in the context of `.build_config` files, @@ -380,11 +378,7 @@ * `deps_info['final_dex']['path']`: Path to the final classes.dex file (or classes.zip in case of multi-dex) -for this APK. - -* `deps_info['final_dex']['all_dex_files']`: -The list of paths to all `deps_info['dex_path']` entries for all libraries -that comprise this APK. Valid only for debug builds. +for this APK - only used for proguarded builds. * `native['libraries']` List of native libraries for the primary ABI to be embedded in this APK. @@ -400,10 +394,6 @@ List of native libraries for the secondary ABI to be embedded in this APK. Empty if only a single ABI is supported. -* `native['uncompress_shared_libraries']` -A boolean indicating whether native libraries are stored uncompressed in the -APK. - * `native['loadable_modules']` A list of native libraries to store within the APK, in addition to those from `native['libraries']`. These correspond to things like the Chromium linker @@ -415,9 +405,6 @@ * `native['library_always_compress']` A list of library files that we always compress. -* `native['library_renames']` -A list of library files that we prepend "crazy." to their file names. - * `assets` A list of assets stored compressed in the APK. Each entry has the format `<source>:<dest>`, where `<source>` is relative to @@ -450,10 +437,11 @@ NOTE: This has nothing to do with *Android* resources. -* `jni['all_source']` The list of all `deps_info['java_sources_file']` entries for all library +* `deps_info['jni_all_source']` The list of all `deps_info['target_sources_file']` entries for all library dependencies for this APK. 
Note: this is a list of files, where each file -contains a list of Java source files. This is used for JNI registration. +contains a list of Java and Kotlin source files. This is used for JNI +registration. * `deps_info['proguard_all_configs']`: The collection of all 'deps_info['proguard_configs']` values from this target @@ -543,11 +531,6 @@ `android_apk` and others), and contains information related to the compilation of Java sources, class files, and jars. -* `javac['resource_packages']` -For `java_library` targets, this is the list of package names for all resource -dependencies for the current target. Order must match the one from -`javac['srcjars']`. For other target types, this key does not exist. - * `javac['classpath']` The classpath used to compile this target when annotation processors are present. @@ -572,38 +555,33 @@ --------------- END_MARKDOWN --------------------------------------------------- """ -from __future__ import print_function - import collections import itertools import json import optparse import os +import shutil import sys import xml.dom.minidom from util import build_utils from util import resource_utils - -# TODO(crbug.com/1174969): Remove this once Python2 is obsoleted. -if sys.version_info.major == 2: - zip_longest = itertools.izip_longest -else: - zip_longest = itertools.zip_longest +import action_helpers # build_utils adds //build to sys.path. # Types that should never be used as a dependency of another build config. _ROOT_TYPES = ('android_apk', 'java_binary', 'java_annotation_processor', - 'junit_binary', 'android_app_bundle') + 'robolectric_binary', 'android_app_bundle') # Types that should not allow code deps to pass through. _RESOURCE_TYPES = ('android_assets', 'android_resources', 'system_java_library') +# Cache of path -> JSON dict. +_dep_config_cache = {} + class OrderedSet(collections.OrderedDict): - # Value |parameter| is present to avoid presubmit warning due to different - # number of parameters from overridden method. 
@staticmethod - def fromkeys(iterable, value=None): + def fromkeys(iterable): out = OrderedSet() out.update(iterable) return out @@ -635,7 +613,8 @@ def _ExtractMarkdownDocumentation(input_text): return result -class AndroidManifest(object): + +class AndroidManifest: def __init__(self, path): self.path = path dom = xml.dom.minidom.parse(path) @@ -665,12 +644,15 @@ def GetPackageName(self): return self.manifest.getAttribute('package') -dep_config_cache = {} -def GetDepConfig(path): - if not path in dep_config_cache: +def GetDepConfigRoot(path): + if not path in _dep_config_cache: with open(path) as jsonfile: - dep_config_cache[path] = json.load(jsonfile)['deps_info'] - return dep_config_cache[path] + _dep_config_cache[path] = json.load(jsonfile) + return _dep_config_cache[path] + + +def GetDepConfig(path): + return GetDepConfigRoot(path)['deps_info'] def DepsOfType(wanted_type, configs): @@ -682,13 +664,20 @@ def DepPathsOfType(wanted_type, config_paths): def GetAllDepsConfigsInOrder(deps_config_paths, filter_func=None): - def GetDeps(path): + def apply_filter(paths): + if filter_func: + return [p for p in paths if filter_func(GetDepConfig(p))] + return paths + + def discover(path): config = GetDepConfig(path) - if filter_func and not filter_func(config): - return [] - return config['deps_configs'] + all_deps = config['deps_configs'] + config.get('public_deps_configs', []) + return apply_filter(all_deps) - return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps) + deps_config_paths = apply_filter(deps_config_paths) + deps_config_paths = build_utils.GetSortedTransitiveDependencies( + deps_config_paths, discover) + return deps_config_paths def GetObjectByPath(obj, key_path): @@ -706,7 +695,7 @@ def RemoveObjDups(obj, base, *key_path): target[:] = [x for x in target if x not in base_target] -class Deps(object): +class Deps: def __init__(self, direct_deps_config_paths): self._all_deps_config_paths = GetAllDepsConfigsInOrder( direct_deps_config_paths) @@ -728,18 +717,6 @@ def Direct(self, wanted_type=None): return self._direct_deps_configs return DepsOfType(wanted_type, self._direct_deps_configs) - def DirectAndChildPublicDeps(self, wanted_type=None): - """Returns direct dependencies and dependencies exported via public_deps of - direct dependencies. 
- """ - dep_paths = set(self._direct_deps_config_paths) - for direct_dep in self._direct_deps_configs: - dep_paths.update(direct_dep.get('public_deps_configs', [])) - deps_list = [GetDepConfig(p) for p in dep_paths] - if wanted_type is None: - return deps_list - return DepsOfType(wanted_type, deps_list) - def AllConfigPaths(self): return self._all_deps_config_paths @@ -763,7 +740,9 @@ def helper(cur): if config['is_prebuilt']: pass elif config['gradle_treat_as_prebuilt']: - helper(Deps(config['deps_configs'])) + all_deps = config['deps_configs'] + config.get( + 'public_deps_configs', []) + helper(Deps(all_deps)) elif config not in ret: ret.append(config) @@ -792,7 +771,7 @@ def _MergeAssets(all_assets): dest_map = uncompressed if disable_compression else compressed other_map = compressed if disable_compression else uncompressed outputs = entry.get('outputs', []) - for src, dest in zip_longest(entry['sources'], outputs): + for src, dest in itertools.zip_longest(entry['sources'], outputs): if not dest: dest = os.path.basename(src) # Merge so that each path shows up in only one of the lists, and that @@ -803,30 +782,30 @@ def _MergeAssets(all_assets): locale_paks.add(dest) def create_list(asset_map): - ret = ['%s:%s' % (src, dest) for dest, src in asset_map.items()] # Sort to ensure deterministic ordering. - ret.sort() - return ret + items = sorted(asset_map.items()) + return [f'{src}:{dest}' for dest, src in items] return create_list(compressed), create_list(uncompressed), locale_paks -def _ResolveGroups(config_paths): +def _ResolveGroupsAndPublicDeps(config_paths): """Returns a list of configs with all groups inlined.""" - ret = list(config_paths) - ret_set = set(config_paths) - while True: - group_paths = DepPathsOfType('group', ret) - if not group_paths: - return ret - for group_path in group_paths: - index = ret.index(group_path) - expanded_config_paths = [] - for deps_config_path in GetDepConfig(group_path)['deps_configs']: - if not deps_config_path in ret_set: - expanded_config_paths.append(deps_config_path) - ret[index:index + 1] = expanded_config_paths - ret_set.update(expanded_config_paths) + + def helper(config_path): + config = GetDepConfig(config_path) + if config['type'] == 'group': + # Groups combine public_deps with deps_configs, so no need to check + # public_config_paths separately. + return config['deps_configs'] + if config['type'] == 'android_resources': + # android_resources targets do not support public_deps, but instead treat + # all resource deps as public deps. + return DepPathsOfType('android_resources', config['deps_configs']) + + return config.get('public_deps_configs', []) + + return build_utils.GetSortedTransitiveDependencies(config_paths, helper) def _DepsFromPaths(dep_paths, @@ -868,6 +847,18 @@ def _DepsFromPaths(dep_paths, return _DepsFromPathsWithFilters(dep_paths, blocklist, allowlist) +def _FilterConfigPaths(dep_paths, blocklist=None, allowlist=None): + if not blocklist and not allowlist: + return dep_paths + configs = [GetDepConfig(p) for p in dep_paths] + if blocklist: + configs = [c for c in configs if c['type'] not in blocklist] + if allowlist: + configs = [c for c in configs if c['type'] in allowlist] + + return [c['path'] for c in configs] + + def _DepsFromPathsWithFilters(dep_paths, blocklist=None, allowlist=None): """Resolves all groups and trims dependency branches that we never want. @@ -880,17 +871,17 @@ def _DepsFromPathsWithFilters(dep_paths, blocklist=None, allowlist=None): about (i.e. 
we wish to prune all other branches that do not start from one of these). """ - group_paths = DepPathsOfType('group', dep_paths) - config_paths = dep_paths - if group_paths: - config_paths = _ResolveGroups(dep_paths) + group_paths - configs = [GetDepConfig(p) for p in config_paths] - if blocklist: - configs = [c for c in configs if c['type'] not in blocklist] + # Filter both before and after so that public_deps of blocked targets are not + # added. + allowlist_with_groups = None if allowlist: - configs = [c for c in configs if c['type'] in allowlist] + allowlist_with_groups = set(allowlist) + allowlist_with_groups.add('group') + dep_paths = _FilterConfigPaths(dep_paths, blocklist, allowlist_with_groups) + dep_paths = _ResolveGroupsAndPublicDeps(dep_paths) + dep_paths = _FilterConfigPaths(dep_paths, blocklist, allowlist) - return Deps([c['path'] for c in configs]) + return Deps(dep_paths) def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_file): @@ -946,10 +937,103 @@ def _CompareClasspathPriority(dep): return 1 if dep.get('low_classpath_priority') else 0 +def _DedupFeatureModuleSharedCode(uses_split_arg, modules, + field_names_to_dedup): + child_to_ancestors = collections.defaultdict(list) + if uses_split_arg: + for split_pair in uses_split_arg: + child, parent = split_pair.split(':') + assert child in modules + assert parent in modules + child_to_ancestors[child] = [parent] + + # Create a full list of ancestors for each module. + for name in modules: + if name == 'base': + continue + curr_name = name + while curr_name in child_to_ancestors: + parent = child_to_ancestors[curr_name][0] + if parent not in child_to_ancestors[name]: + child_to_ancestors[name].append(parent) + curr_name = parent + + if curr_name != 'base': + child_to_ancestors[name].append('base') + + # Strip out duplicates from ancestors. + for name, module in modules.items(): + if name == 'base': + continue + # Make sure we get all ancestors, not just direct parent. + for ancestor in child_to_ancestors[name]: + for f in field_names_to_dedup: + if f in module: + RemoveObjDups(module, modules[ancestor], f) + + # Strip out duplicates from siblings/cousins. + for f in field_names_to_dedup: + _PromoteToCommonAncestor(modules, child_to_ancestors, f) + + +def _PromoteToCommonAncestor(modules, child_to_ancestors, field_name): + module_to_fields_set = {} + for module_name, module in modules.items(): + if field_name in module: + module_to_fields_set[module_name] = set(module[field_name]) + + seen = set() + dupes = set() + for fields in module_to_fields_set.values(): + new_dupes = seen & fields + if new_dupes: + dupes |= new_dupes + seen |= fields + + for d in dupes: + owning_modules = [] + for module_name, fields in module_to_fields_set.items(): + if d in fields: + owning_modules.append(module_name) + assert len(owning_modules) >= 2 + # Rely on the fact that ancestors are inserted from closest to + # farthest, where "base" should always be the last element. + # Arbitrarily using the first owning module - any would work. 
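As a standalone illustration of the `--uses-split` handling above, the sketch below (made-up module names) reproduces the ancestor-chain walk from `_DedupFeatureModuleSharedCode`: every module chains up through its declared parent and implicitly ends at `base`.

```python
import collections

def build_ancestor_chains(uses_split_pairs, modules):
  # Each pair is 'child:parent'; modules without an explicit parent
  # implicitly descend from 'base'.
  child_to_ancestors = collections.defaultdict(list)
  for pair in uses_split_pairs:
    child, parent = pair.split(':')
    child_to_ancestors[child] = [parent]
  for name in modules:
    if name == 'base':
      continue
    curr_name = name
    while curr_name in child_to_ancestors:
      parent = child_to_ancestors[curr_name][0]
      if parent not in child_to_ancestors[name]:
        child_to_ancestors[name].append(parent)
      curr_name = parent
    if curr_name != 'base':
      child_to_ancestors[name].append('base')
  return dict(child_to_ancestors)

print(build_ancestor_chains(['leaf:parent'], ['base', 'parent', 'leaf']))
# {'leaf': ['parent', 'base'], 'parent': ['base']}
```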
+ for ancestor in child_to_ancestors[owning_modules[0]]: + ancestor_is_shared_with_all = True + for o in owning_modules[1:]: + if ancestor not in child_to_ancestors[o]: + ancestor_is_shared_with_all = False + break + if ancestor_is_shared_with_all: + common_ancestor = ancestor + break + for o in owning_modules: + module_to_fields_set[o].remove(d) + module_to_fields_set[common_ancestor].add(d) + + for module_name, module in modules.items(): + if field_name in module: + module[field_name] = sorted(list(module_to_fields_set[module_name])) + + +def _CopyBuildConfigsForDebugging(debug_dir): + shutil.rmtree(debug_dir, ignore_errors=True) + os.makedirs(debug_dir) + for src_path in _dep_config_cache: + dst_path = os.path.join(debug_dir, src_path) + assert dst_path.startswith(debug_dir), dst_path + os.makedirs(os.path.dirname(dst_path), exist_ok=True) + shutil.copy(src_path, dst_path) + print(f'Copied {len(_dep_config_cache)} .build_config.json into {debug_dir}') + + def main(argv): parser = optparse.OptionParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_option('--build-config', help='Path to build_config output.') + parser.add_option('--store-deps-for-debugging-to', + help='Path to copy all transitive build config files to.') parser.add_option( '--type', help='Type of this target (e.g. android_library).') @@ -966,7 +1050,10 @@ def main(argv): parser.add_option('--resources-zip', help='Path to target\'s resources zip.') parser.add_option('--package-name', help='Java package name for these resources.') - parser.add_option('--android-manifest', help='Path to android manifest.') + parser.add_option('--android-manifest', + help='Path to the root android manifest.') + parser.add_option('--merged-android-manifest', + help='Path to the merged android manifest.') parser.add_option('--resource-dirs', action='append', default=[], help='GYP-list of resource dirs') parser.add_option( @@ -993,16 +1080,15 @@ def main(argv): parser.add_option('--treat-as-locale-paks', action='store_true', help='Consider the assets as locale paks in BuildConfig.java') - # java library options + # java library and group options + parser.add_option('--preferred-dep', + action='store_true', + help='Whether the target should be preferred as a dep.') + # java library options parser.add_option('--public-deps-configs', help='GN list of config files of deps which are exposed as ' 'part of the target\'s public API.') - parser.add_option( - '--ignore-dependency-public-deps', - action='store_true', - help='If true, \'public_deps\' will not be collected from the current ' - 'target\'s direct deps.') parser.add_option('--aar-path', help='Path to containing .aar file.') parser.add_option('--device-jar-path', help='Path to .jar for dexing.') parser.add_option('--host-jar-path', help='Path to .jar for java_binary.') @@ -1013,7 +1099,7 @@ def main(argv): help='Path to the interface .jar to use for javac classpath purposes.') parser.add_option('--is-prebuilt', action='store_true', help='Whether the jar was compiled or pre-compiled.') - parser.add_option('--java-sources-file', help='Path to .sources file') + parser.add_option('--target-sources-file', help='Path to .sources file') parser.add_option('--bundled-srcjars', help='GYP-list of .srcjars that have been included in this java_library.') parser.add_option('--supports-android', action='store_true', @@ -1047,6 +1133,11 @@ def main(argv): action='store_true', help='True if a java library is not chromium code, used for lint.') + # 
robolectric_library options + parser.add_option('--is-robolectric', + action='store_true', + help='Whether this is a host side android test library.') + # android library options parser.add_option('--dex-path', help='Path to target\'s dex output.') @@ -1086,10 +1177,6 @@ def main(argv): parser.add_option( '--library-always-compress', help='The list of library files that we always compress.') - parser.add_option( - '--library-renames', - default=[], - help='The list of library files that we prepend crazy. to their names.') # apk options parser.add_option('--apk-path', help='Path to the target\'s apk output.') @@ -1137,24 +1224,33 @@ def main(argv): parser.add_option( '--base-allowlist-rtxt-path', help='Path to R.txt file for the base resources allowlist.') - parser.add_option( - '--is-base-module', - action='store_true', - help='Specifies that this module is a base module for some app bundle.') parser.add_option('--generate-markdown-format-doc', action='store_true', help='Dump the Markdown .build_config format documentation ' 'then exit immediately.') + parser.add_option('--module-name', help='The name of this feature module.') parser.add_option( '--base-module-build-config', help='Path to the base module\'s build config ' 'if this is a feature module.') + parser.add_option('--parent-module-build-config', + help='Path to the parent module\'s build config ' + 'when not using base module as parent.') parser.add_option( '--module-build-configs', help='For bundles, the paths of all non-async module .build_configs ' 'for modules that are part of the bundle.') + parser.add_option( + '--uses-split', + action='append', + help='List of name pairs separated by : mapping a feature module to a ' + 'dependent feature module.') + + parser.add_option( + '--trace-events-jar-dir', + help='Directory of rewritten .jar files for trace event rewriting.') parser.add_option('--version-name', help='Version name for this APK.') parser.add_option('--version-code', help='Version code for this APK.') @@ -1171,15 +1267,14 @@ def main(argv): return 0 if options.fail: - parser.error('\n'.join(build_utils.ParseGnList(options.fail))) + parser.error('\n'.join(action_helpers.parse_gn_list(options.fail))) lib_options = ['unprocessed_jar_path', 'interface_jar_path'] device_lib_options = ['device_jar_path', 'dex_path'] required_options_map = { 'android_apk': ['build_config'] + lib_options + device_lib_options, 'android_app_bundle_module': - ['build_config', 'final_dex_path', 'res_size_info'] + lib_options + - device_lib_options, + ['build_config', 'res_size_info'] + lib_options + device_lib_options, 'android_assets': ['build_config'], 'android_resources': ['build_config', 'resources_zip'], 'dist_aar': ['build_config'], @@ -1188,7 +1283,7 @@ def main(argv): 'java_annotation_processor': ['build_config', 'main_class'], 'java_binary': ['build_config'], 'java_library': ['build_config', 'host_jar_path'] + lib_options, - 'junit_binary': ['build_config'], + 'robolectric_binary': ['build_config'], 'system_java_library': ['build_config', 'unprocessed_jar_path'], 'android_app_bundle': ['build_config', 'module_build_configs'], } @@ -1208,26 +1303,18 @@ def main(argv): if options.base_allowlist_rtxt_path: raise Exception('--base-allowlist-rtxt-path can only be used with ' '--type=android_app_bundle_module') - if options.is_base_module: - raise Exception('--is-base-module can only be used with ' + if options.module_name: + raise Exception('--module-name can only be used with ' '--type=android_app_bundle_module') is_apk_or_module_target = 
options.type in ('android_apk', 'android_app_bundle_module') if not is_apk_or_module_target: - if options.uncompress_shared_libraries: - raise Exception('--uncompressed-shared-libraries can only be used ' - 'with --type=android_apk or ' - '--type=android_app_bundle_module') if options.library_always_compress: raise Exception( '--library-always-compress can only be used with --type=android_apk ' 'or --type=android_app_bundle_module') - if options.library_renames: - raise Exception( - '--library-renames can only be used with --type=android_apk or ' - '--type=android_app_bundle_module') if options.device_jar_path and not options.dex_path: raise Exception('java_library that supports Android requires a dex path.') @@ -1240,34 +1327,26 @@ def main(argv): raise Exception( '--supports-android is required when using --requires-android') - is_java_target = options.type in ( - 'java_binary', 'junit_binary', 'java_annotation_processor', - 'java_library', 'android_apk', 'dist_aar', 'dist_jar', - 'system_java_library', 'android_app_bundle_module') + is_java_target = options.type in ('java_binary', 'robolectric_binary', + 'java_annotation_processor', 'java_library', + 'android_apk', 'dist_aar', 'dist_jar', + 'system_java_library', + 'android_app_bundle_module') - is_static_library_dex_provider_target = ( - options.static_library_dependent_configs and options.proguard_enabled) - if is_static_library_dex_provider_target: - if options.type != 'android_apk': - raise Exception( - '--static-library-dependent-configs only supports --type=android_apk') - options.static_library_dependent_configs = build_utils.ParseGnList( - options.static_library_dependent_configs) - static_library_dependent_configs_by_path = { - p: GetDepConfig(p) - for p in options.static_library_dependent_configs - } - - deps_configs_paths = build_utils.ParseGnList(options.deps_configs) + deps_configs_paths = action_helpers.parse_gn_list(options.deps_configs) + public_deps_configs_paths = action_helpers.parse_gn_list( + options.public_deps_configs) + deps_configs_paths += public_deps_configs_paths deps = _DepsFromPaths(deps_configs_paths, options.type, recursive_resource_deps=options.recursive_resource_deps) - processor_deps = _DepsFromPaths( - build_utils.ParseGnList(options.annotation_processor_configs or ''), - options.type, filter_root_targets=False) + public_deps = _DepsFromPaths(public_deps_configs_paths, options.type) + processor_deps = _DepsFromPaths(action_helpers.parse_gn_list( + options.annotation_processor_configs or ''), + options.type, + filter_root_targets=False) - all_inputs = (deps.AllConfigPaths() + processor_deps.AllConfigPaths() + - list(static_library_dependent_configs_by_path)) + all_inputs = (deps.AllConfigPaths() + processor_deps.AllConfigPaths()) if options.recursive_resource_deps: # Include java_library targets since changes to these targets can remove @@ -1278,41 +1357,35 @@ def main(argv): allowlist=['java_library']) all_inputs.extend(recursive_java_deps.AllConfigPaths()) - direct_deps = deps.Direct() system_library_deps = deps.Direct('system_java_library') all_deps = deps.All() all_library_deps = deps.All('java_library') - all_resources_deps = deps.All('android_resources') if options.type == 'java_library': - java_library_deps = _DepsFromPathsWithFilters( - deps_configs_paths, allowlist=['android_resources']) - # for java libraries, we only care about resources that are directly - # reachable without going through another java_library. 
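The trimming that `_DepsFromPaths()` performs reduces to a type filter over each dep's parsed config. A self-contained sketch of that filter, with in-memory dicts standing in for parsed `.build_config.json` files:

```python
def filter_configs(configs, blocklist=None, allowlist=None):
  # Same shape as _FilterConfigPaths(): apply the blocklist first, then
  # restrict to the allowlist (either may be None).
  if blocklist:
    configs = [c for c in configs if c['type'] not in blocklist]
  if allowlist:
    configs = [c for c in configs if c['type'] in allowlist]
  return configs

# Toy configs (made-up): keep only resource targets.
configs = [{'type': 'java_library'}, {'type': 'android_resources'}]
print(filter_configs(configs, allowlist=['android_resources']))
# [{'type': 'android_resources'}]
```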
- all_resources_deps = java_library_deps.All('android_resources') + # For Java libraries, restrict to resource targets that are direct deps, or + # are indirect via other resource targets. + # The indirect-through-other-targets ones are picked up because + # _ResolveGroupsAndPublicDeps() treats resource deps of resource targets as + # public_deps. + all_resources_deps = deps.Direct('android_resources') + else: + all_resources_deps = deps.All('android_resources') + if options.type == 'android_resources' and options.recursive_resource_deps: # android_resources targets that want recursive resource deps also need to # collect package_names from all library deps. This ensures the R.java files # for these libraries will get pulled in along with the resources. android_resources_library_deps = _DepsFromPathsWithFilters( deps_configs_paths, allowlist=['java_library']).All('java_library') - if is_apk_or_module_target: - # android_resources deps which had recursive_resource_deps set should not - # have the manifests from the recursively collected deps added to this - # module. This keeps the manifest declarations in the child DFMs, since they - # will have the Java implementations. - def ExcludeRecursiveResourcesDeps(config): - return not config.get('includes_recursive_resources', False) - - extra_manifest_deps = [ - GetDepConfig(p) for p in GetAllDepsConfigsInOrder( - deps_configs_paths, filter_func=ExcludeRecursiveResourcesDeps) - ] base_module_build_config = None if options.base_module_build_config: - with open(options.base_module_build_config, 'r') as f: - base_module_build_config = json.load(f) + base_module_build_config = GetDepConfigRoot( + options.base_module_build_config) + parent_module_build_config = base_module_build_config + if options.parent_module_build_config: + parent_module_build_config = GetDepConfigRoot( + options.parent_module_build_config) # Initialize some common config. # Any value that needs to be queryable by dependents must go within deps_info. @@ -1322,7 +1395,6 @@ def ExcludeRecursiveResourcesDeps(config): 'path': options.build_config, 'type': options.type, 'gn_target': options.gn_target, - 'deps_configs': [d['path'] for d in direct_deps], 'chromium_code': not options.non_chromium_code, }, # Info needed only by generate_gradle.py. @@ -1331,45 +1403,72 @@ def ExcludeRecursiveResourcesDeps(config): deps_info = config['deps_info'] gradle = config['gradle'] + # The paths we record as deps can differ from deps_config_paths: + # 1) Paths can be removed when blocked by _ROOT_TYPES / _RESOURCE_TYPES. + # 2) Paths can be added when promoted from group deps or public_deps of deps. + # Deps are promoted from groups/public_deps in order to make the filtering + # of 1) work through group() targets (which themselves are not resource + # targets, but should be treated as such when depended on by a resource + # target). A more involved filtering implementation could work to maintain + # the semantics of 1) without the need to promote deps, but we've avoided + # such an undertaking so far. 
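The promotion described in the comment above can be shown in isolation. A sketch over a toy in-memory graph (the real code walks `.build_config.json` paths with `build_utils.GetSortedTransitiveDependencies`; the `android_resources` special case is omitted here):

```python
def resolve_groups_and_public_deps(roots, graph):
  # graph: name -> {'type': ..., 'deps': [...], 'public_deps': [...]}.
  # Groups expand to all of their deps; other targets only expose their
  # public_deps, mirroring helper() in _ResolveGroupsAndPublicDeps().
  def discover(name):
    node = graph[name]
    if node['type'] == 'group':
      return node['deps']
    return node.get('public_deps', [])

  ret, visited = [], set()
  def visit(name):
    if name in visited:
      return
    visited.add(name)
    for dep in discover(name):
      visit(dep)
    ret.append(name)
  for root in roots:
    visit(root)
  return ret

graph = {
    ':group': {'type': 'group', 'deps': [':impl']},
    ':impl': {'type': 'java_library', 'deps': [], 'public_deps': [':api']},
    ':api': {'type': 'java_library', 'deps': []},
}
print(resolve_groups_and_public_deps([':group'], graph))
# [':api', ':impl', ':group'] - the group and the public dep are inlined.
```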
+ public_deps_set = set() + if public_deps_configs_paths: + deps_info['public_deps_configs'] = [d['path'] for d in public_deps.Direct()] + public_deps_set = set(deps_info['public_deps_configs']) + + deps_info['deps_configs'] = [ + d['path'] for d in deps.Direct() if d['path'] not in public_deps_set + ] + if options.type == 'android_apk' and options.tested_apk_config: tested_apk_deps = Deps([options.tested_apk_config]) tested_apk_config = tested_apk_deps.Direct()[0] gradle['apk_under_test'] = tested_apk_config['name'] if options.type == 'android_app_bundle_module': - deps_info['is_base_module'] = bool(options.is_base_module) + deps_info['module_name'] = options.module_name # Required for generating gradle files. if options.type == 'java_library': deps_info['is_prebuilt'] = bool(options.is_prebuilt) deps_info['gradle_treat_as_prebuilt'] = options.gradle_treat_as_prebuilt + if options.preferred_dep: + deps_info['preferred_dep'] = bool(options.preferred_dep) + if options.android_manifest: deps_info['android_manifest'] = options.android_manifest + if options.merged_android_manifest: + deps_info['merged_android_manifest'] = options.merged_android_manifest + if options.bundled_srcjars: - deps_info['bundled_srcjars'] = build_utils.ParseGnList( + deps_info['bundled_srcjars'] = action_helpers.parse_gn_list( options.bundled_srcjars) - if options.java_sources_file: - deps_info['java_sources_file'] = options.java_sources_file + if options.target_sources_file: + deps_info['target_sources_file'] = options.target_sources_file if is_java_target: - if options.bundled_srcjars: - gradle['bundled_srcjars'] = deps_info['bundled_srcjars'] - - gradle['dependent_android_projects'] = [] - gradle['dependent_java_projects'] = [] - gradle['dependent_prebuilt_jars'] = deps.GradlePrebuiltJarPaths() - if options.main_class: deps_info['main_class'] = options.main_class + dependent_prebuilt_jars = deps.GradlePrebuiltJarPaths() + dependent_prebuilt_jars.sort() + if dependent_prebuilt_jars: + gradle['dependent_prebuilt_jars'] = dependent_prebuilt_jars + + dependent_android_projects = [] + dependent_java_projects = [] for c in deps.GradleLibraryProjectDeps(): if c['requires_android']: - gradle['dependent_android_projects'].append(c['path']) + dependent_android_projects.append(c['path']) else: - gradle['dependent_java_projects'].append(c['path']) + dependent_java_projects.append(c['path']) + + gradle['dependent_android_projects'] = dependent_android_projects + gradle['dependent_java_projects'] = dependent_java_projects if options.r_text_path: deps_info['r_text_path'] = options.r_text_path @@ -1377,14 +1476,18 @@ def ExcludeRecursiveResourcesDeps(config): # TODO(tiborg): Remove creation of JNI info for type group and java_library # once we can generate the JNI registration based on APK / module targets as # opposed to groups and libraries. 
- if is_apk_or_module_target or options.type in ( - 'group', 'java_library', 'junit_binary'): - deps_info['jni'] = {} - all_java_sources = [c['java_sources_file'] for c in all_library_deps - if 'java_sources_file' in c] - if options.java_sources_file: - all_java_sources.append(options.java_sources_file) + if is_apk_or_module_target or options.type in ('group', 'java_library', + 'robolectric_binary', + 'dist_aar'): + all_target_sources = [ + c['target_sources_file'] for c in all_library_deps + if 'target_sources_file' in c + ] + if options.target_sources_file: + all_target_sources.append(options.target_sources_file) + if is_apk_or_module_target or options.type in ('group', 'java_library', + 'robolectric_binary'): if options.apk_proto_resources: deps_info['proto_resources_path'] = options.apk_proto_resources @@ -1409,7 +1512,10 @@ def ExcludeRecursiveResourcesDeps(config): deps_info['requires_android'] = bool(options.requires_android) deps_info['supports_android'] = bool(options.supports_android) - if not options.bypass_platform_checks: + # robolectric is special in that it's an android target that runs on host. + # You are allowed to depend on both android |deps_require_android| and + # non-android |deps_not_support_android| targets. + if not options.bypass_platform_checks and not options.is_robolectric: deps_require_android = (all_resources_deps + [d['name'] for d in all_library_deps if d['requires_android']]) deps_not_support_android = ( @@ -1434,9 +1540,6 @@ def ExcludeRecursiveResourcesDeps(config): if options.unprocessed_jar_path: deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path deps_info['interface_jar_path'] = options.interface_jar_path - if options.public_deps_configs: - deps_info['public_deps_configs'] = build_utils.ParseGnList( - options.public_deps_configs) if options.device_jar_path: deps_info['device_jar_path'] = options.device_jar_path if options.host_jar_path: @@ -1457,16 +1560,17 @@ def ExcludeRecursiveResourcesDeps(config): all_asset_sources = [] if options.asset_renaming_sources: all_asset_sources.extend( - build_utils.ParseGnList(options.asset_renaming_sources)) + action_helpers.parse_gn_list(options.asset_renaming_sources)) if options.asset_sources: - all_asset_sources.extend(build_utils.ParseGnList(options.asset_sources)) + all_asset_sources.extend( + action_helpers.parse_gn_list(options.asset_sources)) deps_info['assets'] = { 'sources': all_asset_sources } if options.asset_renaming_destinations: - deps_info['assets']['outputs'] = ( - build_utils.ParseGnList(options.asset_renaming_destinations)) + deps_info['assets']['outputs'] = (action_helpers.parse_gn_list( + options.asset_renaming_destinations)) if options.disable_asset_compression: deps_info['assets']['disable_compression'] = True if options.treat_as_locale_paks: @@ -1487,15 +1591,12 @@ def ExcludeRecursiveResourcesDeps(config): if options.res_sources_path: deps_info['res_sources_path'] = options.res_sources_path - if options.requires_android and options.type == 'java_library': - # Used to strip out R.class for android_prebuilt()s. 
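The platform check added above (and now skipped for robolectric targets) amounts to a scan over dep configs. A reduced sketch of the `supports_android` side of the rule, with made-up configs:

```python
def check_deps_support_android(target_name, dep_configs):
  # A target that must run on Android cannot depend on targets that do
  # not support Android (mirrors the deps_not_support_android check).
  bad = [d['name'] for d in dep_configs if not d.get('supports_android')]
  if bad:
    raise Exception('%s requires Android, but these deps do not support '
                    'it: %s' % (target_name, ', '.join(bad)))

check_deps_support_android(
    '//foo:bar_java',
    [{'name': '//base:base_java', 'supports_android': True}])
# Passing a dep without supports_android would raise instead.
```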
- config['javac']['resource_packages'] = [ - c['package_name'] for c in all_resources_deps if 'package_name' in c - ] + if (options.requires_android + and options.type == 'java_library') or options.is_robolectric: if options.package_name: deps_info['package_name'] = options.package_name - if options.type in ('android_resources', 'android_apk', 'junit_binary', + if options.type in ('android_resources', 'android_apk', 'robolectric_binary', 'dist_aar', 'android_app_bundle_module', 'java_library'): dependency_zips = [] dependency_zip_overlays = [] @@ -1513,6 +1614,8 @@ def ExcludeRecursiveResourcesDeps(config): extra_package_names = [ c['package_name'] for c in all_resources_deps if 'package_name' in c ] + if options.package_name: + extra_package_names += [options.package_name] # android_resources targets which specified recursive_resource_deps may # have extra_package_names. @@ -1541,22 +1644,6 @@ def ExcludeRecursiveResourcesDeps(config): ] deps_info['dependency_r_txt_files'] = r_text_files - # For feature modules, remove any resources that already exist in the base - # module. - if base_module_build_config: - dependency_zips = [ - c for c in dependency_zips - if c not in base_module_build_config['deps_info']['dependency_zips'] - ] - dependency_zip_overlays = [ - c for c in dependency_zip_overlays if c not in - base_module_build_config['deps_info']['dependency_zip_overlays'] - ] - extra_package_names = [ - c for c in extra_package_names if c not in - base_module_build_config['deps_info']['extra_package_names'] - ] - if options.type == 'android_apk' and options.tested_apk_config: config['deps_info']['arsc_package_name'] = ( tested_apk_config['package_name']) @@ -1569,35 +1656,37 @@ def ExcludeRecursiveResourcesDeps(config): if options.res_size_info: config['deps_info']['res_size_info'] = options.res_size_info + # Safe to sort: Build checks that non-overlay resources have no overlap. + dependency_zips.sort() config['deps_info']['dependency_zips'] = dependency_zips config['deps_info']['dependency_zip_overlays'] = dependency_zip_overlays + # Order doesn't matter, so make stable. + extra_package_names.sort() config['deps_info']['extra_package_names'] = extra_package_names # These are .jars to add to javac classpath but not to runtime classpath. - extra_classpath_jars = build_utils.ParseGnList(options.extra_classpath_jars) + extra_classpath_jars = action_helpers.parse_gn_list( + options.extra_classpath_jars) if extra_classpath_jars: + extra_classpath_jars.sort() deps_info['extra_classpath_jars'] = extra_classpath_jars - mergeable_android_manifests = build_utils.ParseGnList( + mergeable_android_manifests = action_helpers.parse_gn_list( options.mergeable_android_manifests) + mergeable_android_manifests.sort() if mergeable_android_manifests: deps_info['mergeable_android_manifests'] = mergeable_android_manifests extra_proguard_classpath_jars = [] - proguard_configs = build_utils.ParseGnList(options.proguard_configs) + proguard_configs = action_helpers.parse_gn_list(options.proguard_configs) if proguard_configs: # Make a copy of |proguard_configs| since it's mutated below. 
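Several of the flags handled above arrive GN-serialized (e.g. `--proguard-configs=["a.flags", "b.flags"]`). As a rough stand-in for what `action_helpers.parse_gn_list()` does in the simple cases (the real helper understands GN's own quoting and escaping rules; this approximation is only for illustration):

```python
import ast

def parse_gn_list_approx(value):
  # Simple GN lists look like Python list literals; empty or missing
  # flags become an empty list. This is only an approximation.
  if not value:
    return []
  parsed = ast.literal_eval(value)
  return list(parsed) if isinstance(parsed, (list, tuple)) else [parsed]

print(parse_gn_list_approx('["a.flags", "b.flags"]'))
# ['a.flags', 'b.flags']
```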
deps_info['proguard_configs'] = list(proguard_configs) if is_java_target: - if options.ignore_dependency_public_deps: - classpath_direct_deps = deps.Direct() - classpath_direct_library_deps = deps.Direct('java_library') - else: - classpath_direct_deps = deps.DirectAndChildPublicDeps() - classpath_direct_library_deps = deps.DirectAndChildPublicDeps( - 'java_library') + classpath_direct_deps = deps.Direct() + classpath_direct_library_deps = deps.Direct('java_library') # The classpath used to compile this target when annotation processors are # present. @@ -1665,7 +1754,7 @@ def ExcludeRecursiveResourcesDeps(config): device_classpath.extend(c for c in d.get('device_classpath', []) if c not in device_classpath) - if options.type in ('dist_jar', 'java_binary', 'junit_binary'): + if options.type in ('dist_jar', 'java_binary', 'robolectric_binary'): # The classpath to use to run this target. host_classpath = [] if options.host_jar_path: @@ -1684,18 +1773,18 @@ def ExcludeRecursiveResourcesDeps(config): # Collect all sources and resources at the apk/bundle_module level. lint_aars = set() lint_srcjars = set() - lint_java_sources = set() + lint_sources = set() lint_resource_sources = set() lint_resource_zips = set() - if options.java_sources_file: - lint_java_sources.add(options.java_sources_file) + if options.target_sources_file: + lint_sources.add(options.target_sources_file) if options.bundled_srcjars: lint_srcjars.update(deps_info['bundled_srcjars']) for c in all_library_deps: if c['chromium_code'] and c['requires_android']: - if 'java_sources_file' in c: - lint_java_sources.add(c['java_sources_file']) + if 'target_sources_file' in c: + lint_sources.add(c['target_sources_file']) lint_srcjars.update(c['bundled_srcjars']) if 'aar_path' in c: lint_aars.add(c['aar_path']) @@ -1715,7 +1804,7 @@ def ExcludeRecursiveResourcesDeps(config): deps_info['lint_aars'] = sorted(lint_aars) deps_info['lint_srcjars'] = sorted(lint_srcjars) - deps_info['lint_java_sources'] = sorted(lint_java_sources) + deps_info['lint_sources'] = sorted(lint_sources) deps_info['lint_resource_sources'] = sorted(lint_resource_sources) deps_info['lint_resource_zips'] = sorted(lint_resource_zips) deps_info['lint_extra_android_manifests'] = [] @@ -1725,96 +1814,61 @@ def ExcludeRecursiveResourcesDeps(config): deps_info['lint_android_manifest'] = options.android_manifest if options.type == 'android_app_bundle': - module_configs = [ - GetDepConfig(c) - for c in build_utils.ParseGnList(options.module_build_configs) + module_config_paths = action_helpers.parse_gn_list( + options.module_build_configs) + module_configs = [GetDepConfig(c) for c in module_config_paths] + module_configs_by_name = {d['module_name']: d for d in module_configs} + per_module_fields = [ + 'device_classpath', 'trace_event_rewritten_device_classpath', + 'all_dex_files' ] jni_all_source = set() lint_aars = set() lint_srcjars = set() - lint_java_sources = set() + lint_sources = set() lint_resource_sources = set() lint_resource_zips = set() lint_extra_android_manifests = set() - for c in module_configs: - if c['is_base_module']: + config['modules'] = {} + modules = config['modules'] + for n, c in module_configs_by_name.items(): + if n == 'base': assert 'base_module_config' not in deps_info, ( 'Must have exactly 1 base module!') + deps_info['package_name'] = c['package_name'] + deps_info['version_code'] = c['version_code'] + deps_info['version_name'] = c['version_name'] deps_info['base_module_config'] = c['path'] # Use the base module's android manifest for linting. 
deps_info['lint_android_manifest'] = c['android_manifest'] else: lint_extra_android_manifests.add(c['android_manifest']) - jni_all_source.update(c['jni']['all_source']) + jni_all_source.update(c['jni_all_source']) lint_aars.update(c['lint_aars']) lint_srcjars.update(c['lint_srcjars']) - lint_java_sources.update(c['lint_java_sources']) + lint_sources.update(c['lint_sources']) lint_resource_sources.update(c['lint_resource_sources']) lint_resource_zips.update(c['lint_resource_zips']) - deps_info['jni'] = {'all_source': sorted(jni_all_source)} + module = modules[n] = {} + for f in per_module_fields: + if f in c: + module[f] = c[f] + deps_info['jni_all_source'] = sorted(jni_all_source) deps_info['lint_aars'] = sorted(lint_aars) deps_info['lint_srcjars'] = sorted(lint_srcjars) - deps_info['lint_java_sources'] = sorted(lint_java_sources) + deps_info['lint_sources'] = sorted(lint_sources) deps_info['lint_resource_sources'] = sorted(lint_resource_sources) deps_info['lint_resource_zips'] = sorted(lint_resource_zips) deps_info['lint_extra_android_manifests'] = sorted( lint_extra_android_manifests) - # Map configs to classpath entries that should be included in their final dex. - classpath_entries_by_owning_config = collections.defaultdict(list) - extra_main_r_text_files = [] - if is_static_library_dex_provider_target: - # Map classpath entries to configs that include them in their classpath. - configs_by_classpath_entry = collections.defaultdict(list) - static_lib_jar_paths = {} - for config_path, dep_config in (sorted( - static_library_dependent_configs_by_path.items())): - # For bundles, only the jar path and jni sources of the base module - # are relevant for proguard. Should be updated when bundle feature - # modules support JNI. - base_config = dep_config - if dep_config['type'] == 'android_app_bundle': - base_config = GetDepConfig(dep_config['base_module_config']) - extra_main_r_text_files.append(base_config['r_text_path']) - static_lib_jar_paths[config_path] = base_config['device_jar_path'] - proguard_configs.extend(dep_config['proguard_all_configs']) - extra_proguard_classpath_jars.extend( - dep_config['proguard_classpath_jars']) - all_java_sources.extend(base_config['jni']['all_source']) - - # The srcjars containing the generated R.java files are excluded for APK - # targets the use static libraries, so we add them here to ensure the - # union of resource IDs are available in the static library APK. 
- for package in base_config['extra_package_names']: - if package not in extra_package_names: - extra_package_names.append(package) - for cp_entry in dep_config['device_classpath']: - configs_by_classpath_entry[cp_entry].append(config_path) - - for cp_entry in device_classpath: - configs_by_classpath_entry[cp_entry].append(options.build_config) - - for cp_entry, candidate_configs in configs_by_classpath_entry.items(): - config_path = (candidate_configs[0] - if len(candidate_configs) == 1 else options.build_config) - classpath_entries_by_owning_config[config_path].append(cp_entry) - device_classpath.append(cp_entry) - - device_classpath = sorted(set(device_classpath)) - - deps_info['static_library_proguard_mapping_output_paths'] = sorted([ - d['proguard_mapping_path'] - for d in static_library_dependent_configs_by_path.values() - ]) - deps_info['static_library_dependent_classpath_configs'] = { - path: sorted(set(classpath)) - for path, classpath in classpath_entries_by_owning_config.items() - } - deps_info['extra_main_r_text_files'] = sorted(extra_main_r_text_files) + _DedupFeatureModuleSharedCode(options.uses_split, modules, + per_module_fields) if is_apk_or_module_target or options.type in ('group', 'java_library', - 'junit_binary'): - deps_info['jni']['all_source'] = sorted(set(all_java_sources)) + 'robolectric_binary', + 'dist_aar'): + deps_info['jni_all_source'] = sorted(set(all_target_sources)) system_jars = [c['unprocessed_jar_path'] for c in system_library_deps] system_interface_jars = [c['interface_jar_path'] for c in system_library_deps] @@ -1920,14 +1974,11 @@ def ExcludeRecursiveResourcesDeps(config): deps_info['proguard_classpath_jars'] = sorted( set(extra_proguard_classpath_jars)) - # Dependencies for the final dex file of an apk. - if (is_apk_or_module_target or options.final_dex_path - or options.type == 'dist_jar'): - config['final_dex'] = {} - dex_config = config['final_dex'] - dex_config['path'] = options.final_dex_path + if options.final_dex_path: + config['final_dex'] = {'path': options.final_dex_path} if is_apk_or_module_target or options.type == 'dist_jar': - dex_config['all_dex_files'] = all_dex_files + # Dependencies for the final dex file of an apk. 
+ deps_info['all_dex_files'] = all_dex_files if is_java_target: config['javac']['classpath'] = sorted(javac_classpath) @@ -1942,8 +1993,8 @@ def ExcludeRecursiveResourcesDeps(config): config['javac']['processor_classpath'] += [ c['host_jar_path'] for c in processor_deps.All('java_library') ] - config['javac']['processor_classes'] = [ - c['main_class'] for c in processor_deps.Direct()] + config['javac']['processor_classes'] = sorted( + c['main_class'] for c in processor_deps.Direct()) deps_info['javac_full_classpath'] = list(javac_full_classpath) deps_info['javac_full_interface_classpath'] = list( javac_full_interface_classpath) @@ -1961,9 +2012,23 @@ def ExcludeRecursiveResourcesDeps(config): deps_info['javac_full_interface_classpath'] = list( javac_full_interface_classpath) - if options.type in ('android_apk', 'dist_jar', 'android_app_bundle_module', - 'android_app_bundle'): + if options.type in ('android_apk', 'android_app_bundle', + 'android_app_bundle_module', 'dist_aar', 'dist_jar'): deps_info['device_classpath'] = device_classpath + if options.trace_events_jar_dir: + trace_event_rewritten_device_classpath = [] + for jar_path in device_classpath: + file_path = jar_path.replace('../', '') + file_path = file_path.replace('obj/', '') + file_path = file_path.replace('gen/', '') + file_path = file_path.replace('.jar', '.tracing_rewritten.jar') + rewritten_jar_path = os.path.join(options.trace_events_jar_dir, + file_path) + trace_event_rewritten_device_classpath.append(rewritten_jar_path) + + deps_info['trace_event_rewritten_device_classpath'] = ( + trace_event_rewritten_device_classpath) + if options.tested_apk_config: deps_info['device_classpath_extended'] = device_classpath_extended @@ -1996,17 +2061,34 @@ def ExcludeRecursiveResourcesDeps(config): if options.secondary_abi_shared_libraries_runtime_deps: secondary_abi_library_paths = _ExtractSharedLibsFromRuntimeDeps( options.secondary_abi_shared_libraries_runtime_deps) + secondary_abi_library_paths.sort() + paths_without_parent_dirs = [ + p for p in secondary_abi_library_paths if os.path.sep not in p + ] + if paths_without_parent_dirs: + sys.stderr.write('Found secondary native libraries from primary ' + 'toolchain directory. 
This is a bug!\n') + sys.stderr.write('\n'.join(paths_without_parent_dirs)) + sys.stderr.write('\n\nIt may be helpful to run: \n') + sys.stderr.write(' gn path out/Default //chrome/android:' + 'monochrome_secondary_abi_lib //base:base\n') + sys.exit(1) + all_inputs.append(options.secondary_abi_shared_libraries_runtime_deps) - native_library_placeholder_paths = build_utils.ParseGnList( + native_library_placeholder_paths = action_helpers.parse_gn_list( options.native_lib_placeholders) + native_library_placeholder_paths.sort() - secondary_native_library_placeholder_paths = build_utils.ParseGnList( + secondary_native_library_placeholder_paths = action_helpers.parse_gn_list( options.secondary_native_lib_placeholders) + secondary_native_library_placeholder_paths.sort() - loadable_modules = build_utils.ParseGnList(options.loadable_modules) - secondary_abi_loadable_modules = build_utils.ParseGnList( + loadable_modules = action_helpers.parse_gn_list(options.loadable_modules) + loadable_modules.sort() + secondary_abi_loadable_modules = action_helpers.parse_gn_list( options.secondary_abi_loadable_modules) + secondary_abi_loadable_modules.sort() config['native'] = { 'libraries': @@ -2019,27 +2101,13 @@ def ExcludeRecursiveResourcesDeps(config): secondary_native_library_placeholder_paths, 'java_libraries_list': java_libraries_list, - 'uncompress_shared_libraries': - options.uncompress_shared_libraries, 'library_always_compress': options.library_always_compress, - 'library_renames': - options.library_renames, 'loadable_modules': loadable_modules, 'secondary_abi_loadable_modules': secondary_abi_loadable_modules, } - config['assets'], config['uncompressed_assets'], locale_paks = ( - _MergeAssets(deps.All('android_assets'))) - - deps_info['locales_java_list'] = _CreateJavaLocaleListFromAssets( - config['uncompressed_assets'], locale_paks) - - config['extra_android_manifests'] = [] - for c in extra_manifest_deps: - config['extra_android_manifests'].extend( - c.get('mergeable_android_manifests', [])) # Collect java resources java_resources_jars = [d['java_resources_jar'] for d in all_library_deps @@ -2050,22 +2118,63 @@ def ExcludeRecursiveResourcesDeps(config): if 'java_resources_jar' in d] java_resources_jars = [jar for jar in java_resources_jars if jar not in tested_apk_resource_jars] + java_resources_jars.sort() config['java_resources_jars'] = java_resources_jars + if is_apk_or_module_target or options.type == 'robolectric_binary': + # android_resources deps which had recursive_resource_deps set should not + # have the manifests from the recursively collected deps added to this + # module. This keeps the manifest declarations in the child DFMs, since they + # will have the Java implementations. + def ExcludeRecursiveResourcesDeps(config): + return not config.get('includes_recursive_resources', False) + + extra_manifest_deps = [ + GetDepConfig(p) for p in GetAllDepsConfigsInOrder( + deps_configs_paths, filter_func=ExcludeRecursiveResourcesDeps) + ] + # Manifests are listed from highest priority to lowest priority. + # Ensure direct manifests come first, and then sort the rest by name. 
# https://developer.android.com/build/manage-manifests#merge_priorities + config['extra_android_manifests'] = list(mergeable_android_manifests) + manifests_from_deps = [] + for c in extra_manifest_deps: + manifests_from_deps += c.get('mergeable_android_manifests', []) + manifests_from_deps.sort(key=lambda p: (os.path.basename(p), p)) + config['extra_android_manifests'] += manifests_from_deps + + config['assets'], config['uncompressed_assets'], locale_paks = ( + _MergeAssets(deps.All('android_assets'))) + deps_info['locales_java_list'] = _CreateJavaLocaleListFromAssets( + config['uncompressed_assets'], locale_paks) + if options.java_resources_jar_path: deps_info['java_resources_jar'] = options.java_resources_jar_path # DYNAMIC FEATURE MODULES: - # Make sure that dependencies that exist on the base module - # are not duplicated on the feature module. + # There are two approaches to dealing with module dependencies: + # 1) Perform steps in android_apk_or_module(), with only the knowledge of + # ancestor splits. Our implementation currently allows only for 2 levels: + # base -> parent -> leaf + # Bundletool normally fails if two leaf nodes merge the same manifest or + # resources. The fix is to add the common dep to the chrome or base module + # so that our deduplication logic will work. + # RemoveObjDups() implements this approach. + # 2) Perform steps in android_app_bundle(), with knowledge of the full set of + # modules. This is required for dex because it can handle the case of two + # leaf nodes having the same dep, and promoting that dep to their common + # parent. + # _DedupFeatureModuleSharedCode() implements this approach. if base_module_build_config: - base = base_module_build_config - RemoveObjDups(config, base, 'deps_info', 'device_classpath') - RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath') - RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath') - RemoveObjDups(config, base, 'deps_info', 'jni', 'all_source') - RemoveObjDups(config, base, 'final_dex', 'all_dex_files') - RemoveObjDups(config, base, 'extra_android_manifests') + ancestors = [base_module_build_config] + if parent_module_build_config is not base_module_build_config: + ancestors += [parent_module_build_config] + for ancestor in ancestors: + RemoveObjDups(config, ancestor, 'deps_info', 'dependency_zips') + RemoveObjDups(config, ancestor, 'deps_info', 'dependency_zip_overlays') + RemoveObjDups(config, ancestor, 'deps_info', 'extra_package_names') + RemoveObjDups(config, ancestor, 'deps_info', 'jni_all_source') + RemoveObjDups(config, ancestor, 'extra_android_manifests') if is_java_target: jar_to_target = {} @@ -2073,6 +2182,8 @@ def ExcludeRecursiveResourcesDeps(config): _AddJarMapping(jar_to_target, all_deps) if base_module_build_config: _AddJarMapping(jar_to_target, [base_module_build_config['deps_info']]) + if parent_module_build_config is not base_module_build_config: + _AddJarMapping(jar_to_target, [parent_module_build_config['deps_info']]) if options.tested_apk_config: _AddJarMapping(jar_to_target, [tested_apk_config]) for jar, target in zip(tested_apk_config['javac_full_classpath'], @@ -2080,7 +2191,9 @@ def ExcludeRecursiveResourcesDeps(config): jar_to_target[jar] = target # Used by bytecode_processor to give better error message when missing - # deps are found. + # deps are found. Both javac_full_classpath_targets and javac_full_classpath + # must be in identical orders, as they get passed as separate arrays and + # then paired up based on index. 
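The `(basename, path)` sort key used for dep manifests above keeps merge order stable across output directories while leaving directly specified manifests first. A quick illustration with hypothetical paths:

```python
import os

extra_android_manifests = ['gen/chrome/AndroidManifest.xml']  # direct first
manifests_from_deps = [
    'gen/ui/AndroidManifest_overlay.xml',
    'gen/base/AndroidManifest.xml',
]
manifests_from_deps.sort(key=lambda p: (os.path.basename(p), p))
extra_android_manifests += manifests_from_deps
print(extra_android_manifests)
# ['gen/chrome/AndroidManifest.xml', 'gen/base/AndroidManifest.xml',
#  'gen/ui/AndroidManifest_overlay.xml']
```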
config['deps_info']['javac_full_classpath_targets'] = [ jar_to_target[x] for x in deps_info['javac_full_classpath'] ] @@ -2088,8 +2201,14 @@ def ExcludeRecursiveResourcesDeps(config): build_utils.WriteJson(config, options.build_config, only_if_changed=True) if options.depfile: - build_utils.WriteDepfile(options.depfile, options.build_config, - sorted(set(all_inputs))) + action_helpers.write_depfile(options.depfile, options.build_config, + sorted(set(all_inputs))) + + if options.store_deps_for_debugging_to: + GetDepConfig(options.build_config) # Add it to cache. + _CopyBuildConfigsForDebugging(options.store_deps_for_debugging_to) + + return 0 if __name__ == '__main__': diff --git a/build/android/gyp/write_build_config.pydeps b/build/android/gyp/write_build_config.pydeps index b1276bca7b2c..fa7209c2798d 100644 --- a/build/android/gyp/write_build_config.pydeps +++ b/build/android/gyp/write_build_config.pydeps @@ -1,9 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/asyncfilters.py -../../../third_party/jinja2/asyncsupport.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py @@ -23,6 +22,7 @@ ../../../third_party/markupsafe/__init__.py ../../../third_party/markupsafe/_compat.py ../../../third_party/markupsafe/_native.py +../../action_helpers.py ../../gn_helpers.py util/__init__.py util/build_utils.py diff --git a/build/android/gyp/write_native_libraries_java.py b/build/android/gyp/write_native_libraries_java.py index 322b8b2c8277..fb4d2ad18334 100755 --- a/build/android/gyp/write_native_libraries_java.py +++ b/build/android/gyp/write_native_libraries_java.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -12,6 +12,8 @@ import zipfile from util import build_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers _NATIVE_LIBRARIES_TEMPLATE = """\ @@ -29,8 +31,6 @@ // Set to true to enable the use of the Chromium Linker. public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER}; - public static {MAYBE_FINAL}boolean sUseLibraryInZipFile{USE_LIBRARY_IN_ZIP_FILE}; - public static {MAYBE_FINAL}boolean sUseModernLinker{USE_MODERN_LINKER}; // This is the list of native libraries to be loaded (in the correct order) // by LibraryLoader.java. 
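(An aside on the depfile migration that recurs throughout this patch: build_utils.WriteDepfile becomes action_helpers.write_depfile in write_build_config.py above, in this file, and in zip.py below. The sketch assumes the conventional Ninja depfile layout, a single Makefile-style rule; it is not the actual helper implementation, and the paths are illustrative.)

```python
# Hypothetical stand-in for action_helpers.write_depfile(): a Ninja depfile
# maps the first output to the inputs that should trigger a rebuild.
def write_depfile_sketch(depfile_path, first_output, inputs):
    with open(depfile_path, 'w') as f:
        f.write('{}: {}\n'.format(first_output, ' '.join(inputs)))

write_depfile_sketch('example.d', 'NativeLibraries.srcjar',
                     ['native_libraries_list.txt'])
```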
@@ -52,18 +52,12 @@ def _FormatLibraryName(library_name): def main(): parser = argparse.ArgumentParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_argument('--final', action='store_true', help='Use final fields.') parser.add_argument( '--enable-chromium-linker', action='store_true', help='Enable Chromium linker.') - parser.add_argument( - '--load-library-from-apk', - action='store_true', - help='Load libaries from APK without uncompressing.') - parser.add_argument( - '--use-modern-linker', action='store_true', help='To use ModernLinker.') parser.add_argument( '--native-libraries-list', help='File with list of native libraries.') parser.add_argument( @@ -85,45 +79,45 @@ def main(): options = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:])) - assert (options.enable_chromium_linker or not options.load_library_from_apk) - - native_libraries_list = [] + native_libraries = [] if options.main_component_library: - native_libraries_list.append( - _FormatLibraryName(options.main_component_library)) + native_libraries.append(options.main_component_library) elif options.native_libraries_list: with open(options.native_libraries_list) as f: - for path in f: - path = path.strip() - native_libraries_list.append(_FormatLibraryName(path)) + native_libraries.extend(l.strip() for l in f) + + if options.enable_chromium_linker and len(native_libraries) > 1: + sys.stderr.write( + 'Multiple libraries not supported when using chromium linker. Found:\n') + sys.stderr.write('\n'.join(native_libraries)) + sys.stderr.write('\n') + sys.exit(1) def bool_str(value): if value: return ' = true' - elif options.final: + if options.final: return ' = false' return '' format_dict = { 'MAYBE_FINAL': 'final ' if options.final else '', 'USE_LINKER': bool_str(options.enable_chromium_linker), - 'USE_LIBRARY_IN_ZIP_FILE': bool_str(options.load_library_from_apk), - 'USE_MODERN_LINKER': bool_str(options.use_modern_linker), - 'LIBRARIES': ','.join(native_libraries_list), + 'LIBRARIES': ','.join(_FormatLibraryName(n) for n in native_libraries), 'CPU_FAMILY': options.cpu_family, } - with build_utils.AtomicOutput(options.output) as f: + with action_helpers.atomic_output(options.output) as f: with zipfile.ZipFile(f.name, 'w') as srcjar_file: - build_utils.AddToZipHermetic( + zip_helpers.add_to_zip_hermetic( zip_file=srcjar_file, zip_path='org/chromium/build/NativeLibraries.java', data=_NATIVE_LIBRARIES_TEMPLATE.format(**format_dict)) if options.depfile: assert options.native_libraries_list - build_utils.WriteDepfile(options.depfile, - options.output, - inputs=[options.native_libraries_list]) + action_helpers.write_depfile(options.depfile, + options.output, + inputs=[options.native_libraries_list]) if __name__ == '__main__': diff --git a/build/android/gyp/write_native_libraries_java.pydeps b/build/android/gyp/write_native_libraries_java.pydeps index f5176ef78e3f..c47e1652ce0f 100644 --- a/build/android/gyp/write_native_libraries_java.pydeps +++ b/build/android/gyp/write_native_libraries_java.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_native_libraries_java.pydeps build/android/gyp/write_native_libraries_java.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py util/__init__.py util/build_utils.py write_native_libraries_java.py diff --git a/build/android/gyp/zip.py b/build/android/gyp/zip.py index 6b405400eb24..f4b4acfb2efd 100755 --- a/build/android/gyp/zip.py +++ 
b/build/android/gyp/zip.py @@ -1,16 +1,19 @@ #!/usr/bin/env python3 # -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Archives a set of files.""" import argparse +import json import os import sys import zipfile from util import build_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers def main(args): @@ -33,39 +36,47 @@ def main(args): action='store_false', dest='compress', help='Do not compress entries') - build_utils.AddDepfileOption(parser) + parser.add_argument('--comment-json', + action='append', + metavar='KEY=VALUE', + type=lambda x: x.split('=', 1), + help='Entry to store in JSON-encoded archive comment.') + action_helpers.add_depfile_arg(parser) options = parser.parse_args(args) - with build_utils.AtomicOutput(options.output) as f: + with action_helpers.atomic_output(options.output) as f: with zipfile.ZipFile(f.name, 'w') as out_zip: depfile_deps = None if options.input_files: - files = build_utils.ParseGnList(options.input_files) - build_utils.DoZip( - files, - out_zip, - base_dir=options.input_files_base_dir, - compress_fn=lambda _: options.compress) + files = action_helpers.parse_gn_list(options.input_files) + zip_helpers.add_files_to_zip(files, + out_zip, + base_dir=options.input_files_base_dir, + compress=options.compress) if options.input_zips: - files = build_utils.ParseGnList(options.input_zips) + files = action_helpers.parse_gn_list(options.input_zips) depfile_deps = files path_transform = None if options.input_zips_excluded_globs: - globs = build_utils.ParseGnList(options.input_zips_excluded_globs) + globs = action_helpers.parse_gn_list( + options.input_zips_excluded_globs) path_transform = ( lambda p: None if build_utils.MatchesGlob(p, globs) else p) - build_utils.MergeZips( - out_zip, - files, - path_transform=path_transform, - compress=options.compress) + zip_helpers.merge_zips(out_zip, + files, + path_transform=path_transform, + compress=options.compress) + + if options.comment_json: + out_zip.comment = json.dumps(dict(options.comment_json), + sort_keys=True).encode('utf-8') # Depfile used only by dist_jar(). if options.depfile: - build_utils.WriteDepfile(options.depfile, - options.output, - inputs=depfile_deps) + action_helpers.write_depfile(options.depfile, + options.output, + inputs=depfile_deps) if __name__ == '__main__': diff --git a/build/android/gyp/zip.pydeps b/build/android/gyp/zip.pydeps index 36affd1707fb..973fe436c2e4 100644 --- a/build/android/gyp/zip.pydeps +++ b/build/android/gyp/zip.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/zip.pydeps build/android/gyp/zip.py +../../action_helpers.py ../../gn_helpers.py +../../zip_helpers.py util/__init__.py util/build_utils.py zip.py diff --git a/build/android/host_heartbeat.py b/build/android/host_heartbeat.py index 4e11c5c2498a..f22c2d7e8e0f 100755 --- a/build/android/host_heartbeat.py +++ b/build/android/host_heartbeat.py @@ -1,6 +1,6 @@ -#!/usr/bin/env vpython +#!/usr/bin/env vpython3 # -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
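(Returning to zip.py for a moment: the new --comment-json flag stores its KEY=VALUE entries as a JSON-encoded dict in the archive-level zip comment. A minimal sketch, not part of the patch, of the round trip with the standard zipfile module; the file name and key are made up.)

```python
import json
import zipfile

# Write an archive whose comment carries build metadata, as zip.py now does.
with zipfile.ZipFile('example.zip', 'w') as z:
    z.comment = json.dumps({'version': '1.0'}, sort_keys=True).encode('utf-8')

# Any consumer can recover the dict without extracting the archive.
with zipfile.ZipFile('example.zip') as z:
    print(json.loads(z.comment.decode('utf-8')))  # {'version': '1.0'}
```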
diff --git a/build/android/incremental_install/BUILD.gn b/build/android/incremental_install/BUILD.gn index 8d26e9622b08..e2134dd14990 100644 --- a/build/android/incremental_install/BUILD.gn +++ b/build/android/incremental_install/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -13,6 +13,7 @@ android_library("bootstrap_java") { "java/org/chromium/incrementalinstall/Reflect.java", "java/org/chromium/incrementalinstall/SecondInstrumentation.java", ] + deps = [ "third_party/AndroidHiddenApiBypass:hidden_api_bypass_java" ] jacoco_never_instrument = true no_build_hooks = true } diff --git a/build/android/incremental_install/__init__.py b/build/android/incremental_install/__init__.py index 50b23dff631d..a43e6af7224b 100644 --- a/build/android/incremental_install/__init__.py +++ b/build/android/incremental_install/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/incremental_install/generate_android_manifest.py b/build/android/incremental_install/generate_android_manifest.py index e069dab80ef9..ffa26c20b924 100755 --- a/build/android/incremental_install/generate_android_manifest.py +++ b/build/android/incremental_install/generate_android_manifest.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Creates an AndroidManifest.xml for an incremental APK. @@ -11,16 +11,13 @@ import argparse import os -import subprocess import sys -import tempfile -import zipfile from xml.etree import ElementTree sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, 'gyp')) from util import build_utils from util import manifest_utils -from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. _INCREMENTAL_APP_NAME = 'org.chromium.incrementalinstall.BootstrapApplication' _META_DATA_APP_NAME = 'incremental-install-real-app' @@ -42,23 +39,18 @@ def _AddNamespace(name): def _ParseArgs(args): parser = argparse.ArgumentParser() - parser.add_argument( - '--src-manifest', required=True, help='The main manifest of the app') + parser.add_argument('--src-manifest', + required=True, + help='The main manifest of the app.') + parser.add_argument('--dst-manifest', + required=True, + help='The output modified manifest.') parser.add_argument('--disable-isolated-processes', help='Changes all android:isolatedProcess to false. 
' 'This is required on Android M+', action='store_true') - parser.add_argument( - '--out-apk', required=True, help='Path to output .ap_ file') - parser.add_argument( - '--in-apk', required=True, help='Path to non-incremental .ap_ file') - parser.add_argument( - '--aapt2-path', required=True, help='Path to the Android aapt tool') - parser.add_argument( - '--android-sdk-jars', help='GN List of resource apks to include.') ret = parser.parse_args(build_utils.ExpandFileArgs(args)) - ret.android_sdk_jars = build_utils.ParseGnList(ret.android_sdk_jars) return ret @@ -68,13 +60,8 @@ def _CreateMetaData(parent, name, value): meta_data_node.set(_AddNamespace('value'), value) -def _ProcessManifest(path, arsc_package_name, disable_isolated_processes): - doc, manifest_node, app_node = manifest_utils.ParseManifest(path) - - # Ensure the manifest package matches that of the apk's arsc package - # So that resource references resolve correctly. The actual manifest - # package name is set via --rename-manifest-package. - manifest_node.set('package', arsc_package_name) +def _ProcessManifest(path, disable_isolated_processes): + doc, _, app_node = manifest_utils.ParseManifest(path) # Pylint for some reason thinks app_node is an int. # pylint: disable=no-member @@ -100,40 +87,19 @@ def _ProcessManifest(path, arsc_package_name, disable_isolated_processes): ret = ret.replace(b'extractNativeLibs="false"', b'extractNativeLibs="true"') if disable_isolated_processes: ret = ret.replace(b'isolatedProcess="true"', b'isolatedProcess="false"') + # externalService only matters for isolatedProcess="true". See: + # https://developer.android.com/reference/android/R.attr#externalService + ret = ret.replace(b'externalService="true"', b'externalService="false"') return ret def main(raw_args): options = _ParseArgs(raw_args) - arsc_package, _ = resource_utils.ExtractArscPackage(options.aapt2_path, - options.in_apk) - # Extract version from the compiled manifest since it might have been set - # via aapt, and not exist in the manifest's text form.
- version_code, version_name, manifest_package = ( - resource_utils.ExtractBinaryManifestValues(options.aapt2_path, - options.in_apk)) - - new_manifest_data = _ProcessManifest(options.src_manifest, arsc_package, + new_manifest_data = _ProcessManifest(options.src_manifest, options.disable_isolated_processes) - with tempfile.NamedTemporaryFile() as tmp_manifest, \ - tempfile.NamedTemporaryFile() as tmp_apk: - tmp_manifest.write(new_manifest_data) - tmp_manifest.flush() - cmd = [ - options.aapt2_path, 'link', '-o', tmp_apk.name, '--manifest', - tmp_manifest.name, '-I', options.in_apk, '--replace-version', - '--version-code', version_code, '--version-name', version_name, - '--rename-manifest-package', manifest_package, '--debug-mode' - ] - for j in options.android_sdk_jars: - cmd += ['-I', j] - subprocess.check_call(cmd) - with zipfile.ZipFile(options.out_apk, 'w') as z: - path_transform = lambda p: None if p != 'AndroidManifest.xml' else p - build_utils.MergeZips(z, [tmp_apk.name], path_transform=path_transform) - path_transform = lambda p: None if p == 'AndroidManifest.xml' else p - build_utils.MergeZips(z, [options.in_apk], path_transform=path_transform) + with action_helpers.atomic_output(options.dst_manifest) as out_manifest: + out_manifest.write(new_manifest_data) if __name__ == '__main__': diff --git a/build/android/incremental_install/generate_android_manifest.pydeps b/build/android/incremental_install/generate_android_manifest.pydeps index 568ea1e2c02a..68c832bccb72 100644 --- a/build/android/incremental_install/generate_android_manifest.pydeps +++ b/build/android/incremental_install/generate_android_manifest.pydeps @@ -1,29 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/generate_android_manifest.pydeps build/android/incremental_install/generate_android_manifest.py -../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py -../../../third_party/jinja2/bccache.py -../../../third_party/jinja2/compiler.py -../../../third_party/jinja2/defaults.py -../../../third_party/jinja2/environment.py -../../../third_party/jinja2/exceptions.py -../../../third_party/jinja2/filters.py -../../../third_party/jinja2/idtracking.py -../../../third_party/jinja2/lexer.py -../../../third_party/jinja2/loaders.py -../../../third_party/jinja2/nodes.py -../../../third_party/jinja2/optimizer.py -../../../third_party/jinja2/parser.py -../../../third_party/jinja2/runtime.py -../../../third_party/jinja2/tests.py -../../../third_party/jinja2/utils.py -../../../third_party/jinja2/visitor.py -../../../third_party/markupsafe/__init__.py -../../../third_party/markupsafe/_compat.py -../../../third_party/markupsafe/_native.py +../../action_helpers.py ../../gn_helpers.py ../gyp/util/__init__.py ../gyp/util/build_utils.py ../gyp/util/manifest_utils.py -../gyp/util/resource_utils.py generate_android_manifest.py diff --git a/build/android/incremental_install/installer.py b/build/android/incremental_install/installer.py index 962582298c87..68e28b48a3e6 100755 --- a/build/android/incremental_install/installer.py +++ b/build/android/incremental_install/installer.py @@ -1,6 +1,6 @@ -#!/usr/bin/env vpython +#!/usr/bin/env vpython3 # -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
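(Before moving on to installer.py: the slimmed-down generate_android_manifest.py above no longer links a new .ap_ with aapt2; it only rewrites attributes in the serialized manifest text. A toy sketch, not part of the patch, of those byte-level replacements, with a made-up manifest snippet; the real script first parses and re-serializes the manifest via manifest_utils.)

```python
manifest = (b'<application android:extractNativeLibs="false">'
            b'<service android:isolatedProcess="true"'
            b' android:externalService="true"/></application>')
manifest = manifest.replace(b'extractNativeLibs="false"', b'extractNativeLibs="true"')
disable_isolated_processes = True  # stands in for --disable-isolated-processes
if disable_isolated_processes:
    manifest = manifest.replace(b'isolatedProcess="true"', b'isolatedProcess="false"')
    # externalService only has an effect when isolatedProcess is true.
    manifest = manifest.replace(b'externalService="true"', b'externalService="false"')
print(manifest.decode('utf-8'))
```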
@@ -10,6 +10,7 @@ import collections import functools import glob +import hashlib import json import logging import os @@ -36,6 +37,7 @@ _R8_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'r8', 'lib', 'r8.jar') +_SHARD_JSON_FILENAME = 'shards.json' def _DeviceCachePath(device): @@ -60,17 +62,34 @@ def _GetDeviceIncrementalDir(package): return '/data/local/tmp/incremental-app-%s' % package -def _IsStale(src_paths, dest): +def _IsStale(src_paths, old_src_paths, dest_path): """Returns whether |dest_path| is missing, older than any of |src_paths|, or whether |src_paths| differs from |old_src_paths|.""" - if not os.path.exists(dest): + if not os.path.exists(dest_path): return True - dest_time = os.path.getmtime(dest) + # Always mark as stale if any paths were added or removed. + if set(src_paths) != set(old_src_paths): + return True + dest_time = os.path.getmtime(dest_path) for path in src_paths: if os.path.getmtime(path) > dest_time: return True return False +def _LoadPrevShards(dex_staging_dir): + shards_json_path = os.path.join(dex_staging_dir, _SHARD_JSON_FILENAME) + if not os.path.exists(shards_json_path): + return {} + with open(shards_json_path) as f: + return json.load(f) + + +def _SaveNewShards(shards, dex_staging_dir): + shards_json_path = os.path.join(dex_staging_dir, _SHARD_JSON_FILENAME) + with open(shards_json_path, 'w') as f: + json.dump(shards, f) + + def _AllocateDexShards(dex_files): """Divides input dex files into buckets.""" # Goals: @@ -91,19 +110,26 @@ def _AllocateDexShards(dex_files): os.sep, '.') shards[name].append(src_path) else: - name = 'shard{}.dex.jar'.format(hash(src_path) % NUM_CORE_SHARDS) + # The stdlib hash(string) function is salted differently across python3 + # invocations. Thus we use md5 instead to consistently shard the same + # file to the same shard across runs. + hex_hash = hashlib.md5(src_path.encode('utf-8')).hexdigest() + name = 'shard{}.dex.jar'.format(int(hex_hash, 16) % NUM_CORE_SHARDS) shards[name].append(src_path) logging.info('Sharding %d dex files into %d buckets', len(dex_files), len(shards)) return shards -def _CreateDexFiles(shards, dex_staging_dir, min_api, use_concurrency): +def _CreateDexFiles(shards, prev_shards, dex_staging_dir, min_api, + use_concurrency): """Creates dex files within |dex_staging_dir| defined by |shards|.""" tasks = [] - for name, src_paths in shards.iteritems(): + for name, src_paths in shards.items(): dest_path = os.path.join(dex_staging_dir, name) - if _IsStale(src_paths, dest_path): + if _IsStale(src_paths=src_paths, + old_src_paths=prev_shards.get(name, []), + dest_path=dest_path): tasks.append( functools.partial(dex.MergeDexForIncrementalInstall, _R8_PATH, src_paths, dest_path, min_api)) @@ -146,7 +172,7 @@ def Install(device, install_json, apk=None, enable_device_cache=False, permissions: A list of the permissions to grant, or None to grant all non-denylisted permissions in the manifest.
""" - if isinstance(install_json, basestring): + if isinstance(install_json, str): with open(install_json) as f: install_dict = json.load(f) else: @@ -212,10 +238,14 @@ def do_push_native(): def do_merge_dex(): merge_dex_timer.Start() + prev_shards = _LoadPrevShards(dex_staging_dir) shards = _AllocateDexShards(dex_files) build_utils.MakeDirectory(dex_staging_dir) - _CreateDexFiles(shards, dex_staging_dir, apk.GetMinSdkVersion(), - use_concurrency) + _CreateDexFiles(shards, prev_shards, dex_staging_dir, + apk.GetMinSdkVersion(), use_concurrency) + # New shard information must be saved after _CreateDexFiles since + # _CreateDexFiles removes all non-dex files from the staging dir. + _SaveNewShards(shards, dex_staging_dir) merge_dex_timer.Stop(log=False) def do_push_dex(): @@ -227,33 +257,6 @@ def do_push_dex(): _Execute(use_concurrency, do_push_native, do_merge_dex) do_push_dex() - def check_device_configured(): - target_sdk_version = int(apk.GetTargetSdkVersion()) - # Beta Q builds apply allowlist to targetSdk=28 as well. - if target_sdk_version >= 28 and device.build_version_sdk >= 28: - # In P, there are two settings: - # * hidden_api_policy_p_apps - # * hidden_api_policy_pre_p_apps - # In Q, there is just one: - # * hidden_api_policy - if device.build_version_sdk == 28: - setting_name = 'hidden_api_policy_p_apps' - else: - setting_name = 'hidden_api_policy' - apis_allowed = ''.join( - device.RunShellCommand(['settings', 'get', 'global', setting_name], - check_return=True)) - if apis_allowed.strip() not in '01': - msg = """\ -Cannot use incremental installs on Android P+ without first enabling access to -non-SDK interfaces (https://developer.android.com/preview/non-sdk-q). - -To enable access: - adb -s {0} shell settings put global {1} 0 -To restore back to default: - adb -s {0} shell settings delete global {1}""" - raise Exception(msg.format(device.serial, setting_name)) - cache_path = _DeviceCachePath(device) def restore_cache(): if not enable_device_cache: @@ -294,8 +297,7 @@ def release_installer_lock(): # Concurrency here speeds things up quite a bit, but DeviceUtils hasn't # been designed for multi-threading. Enabling only because this is a # developer-only tool. - setup_timer = _Execute(use_concurrency, create_lock_files, restore_cache, - check_device_configured) + setup_timer = _Execute(use_concurrency, create_lock_files, restore_cache) _Execute(use_concurrency, do_install, do_push_files) diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java index f7003f27eada..f88297050758 100644 --- a/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java +++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java @@ -1,4 +1,4 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java index f19740649998..f1f507af8bc1 100644 --- a/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java +++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java @@ -1,4 +1,4 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java index b6d752247b71..53e926e626bd 100644 --- a/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java +++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java @@ -1,4 +1,4 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -153,34 +153,30 @@ void importNativeLibs(File libDir) throws ReflectiveOperationException, IOExcept @SuppressLint("SetWorldReadable") private void safeCopyAllFiles(File srcDir, File dstDir) throws IOException { + if (!mIsPrimaryProcess) { + // TODO: Work around this issue by using APK splits to install each dex / lib. + throw new RuntimeException("Incremental install does not work on Android M+ " + + "with isolated processes. Build system should have removed this. " + + "Please file a bug."); + } + // The library copying is not necessary on older devices, but we do it anyways to // simplify things (it's fast compared to dexing). // https://code.google.com/p/android/issues/detail?id=79480 + ensureAppFilesSubDirExists(); File lockFile = new File(mAppFilesSubDir, dstDir.getName() + ".lock"); - if (mIsPrimaryProcess) { - ensureAppFilesSubDirExists(); - LockFile lock = LockFile.acquireRuntimeLock(lockFile); - if (lock == null) { - LockFile.waitForRuntimeLock(lockFile, 10 * 1000); - } else { - try { - dstDir.mkdir(); - dstDir.setReadable(true, false); - dstDir.setExecutable(true, false); - copyChangedFiles(srcDir, dstDir); - } finally { - lock.release(); - } - } + LockFile lock = LockFile.acquireRuntimeLock(lockFile); + if (lock == null) { + LockFile.waitForRuntimeLock(lockFile, 10 * 1000); } else { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - // TODO: Work around this issue by using APK splits to install each dex / lib. - throw new RuntimeException("Incremental install does not work on Android M+ " - + "with isolated processes. Build system should have removed this. " - + "Please file a bug."); + try { + dstDir.mkdir(); + dstDir.setReadable(true, false); + dstDir.setExecutable(true, false); + copyChangedFiles(srcDir, dstDir); + } finally { + lock.release(); } - // Other processes: Waits for primary process to finish copying. 
- LockFile.waitForRuntimeLock(lockFile, 10 * 1000); } } @@ -291,14 +287,9 @@ private Object[] addDexElements(File[] files, File optimizedDirectory, Object[] File emptyDir = new File(""); for (int i = 0; i < files.length; ++i) { File file = files[i]; - Object dexFile; - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { - // loadDexFile requires that ret contain all previously added elements. - dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory, - mClassLoader, ret); - } else { - dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory); - } + // loadDexFile requires that ret contain all previously added elements. + Object dexFile = Reflect.invokeMethod( + clazz, "loadDexFile", file, optimizedDirectory, mClassLoader, ret); Object dexElement; if (Build.VERSION.SDK_INT >= 26) { dexElement = Reflect.newInstance(entryClazz, dexFile, file); diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java index 19d1f7624e1b..08d4c66c3070 100644 --- a/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java +++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java @@ -1,4 +1,4 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java index c64dc1e8a313..6ce74eb819ce 100644 --- a/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java +++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java @@ -1,14 +1,19 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.incrementalinstall; +import android.os.Build; + +import org.lsposed.hiddenapibypass.HiddenApiBypass; + import java.lang.reflect.Array; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.Arrays; +import java.util.List; /** * Reflection helper methods. */ @@ -79,12 +84,22 @@ static Object newInstance(Class clazz, Object... params) private static Field findField(Object instance, String name) throws NoSuchFieldException { boolean isStatic = instance instanceof Class; - Class clazz = isStatic ? (Class) instance : instance.getClass(); + Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass(); for (; clazz != null; clazz = clazz.getSuperclass()) { - try { - return clazz.getDeclaredField(name); - } catch (NoSuchFieldException e) { - // Need to look in the super class. + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.P) { + try { + return clazz.getDeclaredField(name); + } catch (NoSuchFieldException e) { + // Need to look in the super class. + } + } else { + List<Field> fields = isStatic ?
HiddenApiBypass.getStaticFields(clazz) + : HiddenApiBypass.getInstanceFields(clazz); + for (Field field : fields) { + if (field.getName().equals(name)) { + return field; + } + } } } throw new NoSuchFieldException("Field " + name + " not found in " + instance.getClass()); diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java index 3e0df0521e94..ecf4870e80f1 100644 --- a/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java +++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java @@ -1,4 +1,4 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. +// Copyright 2017 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/incremental_install/third_party/AndroidHiddenApiBypass/BUILD.gn b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/BUILD.gn new file mode 100644 index 000000000000..86e146663330 --- /dev/null +++ b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/BUILD.gn @@ -0,0 +1,29 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +# Approved by chrome-security@ only for use by incremental install. +visibility = [ + ":*", + "//build/android/incremental_install:*", +] + +android_library("stub_java") { + sources = [ "stub/src/main/java/dalvik/system/VMRuntime.java" ] + jar_excluded_patterns = [ "*" ] +} + +android_library("hidden_api_bypass_java") { + sources = [ + "library/src/main/java/org/lsposed/hiddenapibypass/Helper.java", + "library/src/main/java/org/lsposed/hiddenapibypass/HiddenApiBypass.java", + "local_modifications/org/lsposed/hiddenapibypass/library/BuildConfig.java", + ] + deps = [ + ":stub_java", + "//third_party/androidx:androidx_annotation_annotation_jvm_java", + ] + jacoco_never_instrument = true +} diff --git a/build/android/incremental_install/third_party/AndroidHiddenApiBypass/LICENSE b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/LICENSE new file mode 100644 index 000000000000..261eeb9e9f8b --- /dev/null +++ b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/build/android/incremental_install/third_party/AndroidHiddenApiBypass/README.chromium b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/README.chromium new file mode 100644 index 000000000000..b1fdc9c594f1 --- /dev/null +++ b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/README.chromium @@ -0,0 +1,16 @@ +Name: AndroidHiddenApiBypass +URL: https://github.com/LSPosed/AndroidHiddenApiBypass +Version: b16cc3934a27e55e51f00f5504c7f49e7c8cfab7 +License: Apache 2.0 +License File: NOT_SHIPPED +Security Critical: no + +Description: +AndroidHiddenApiBypass enables reflection on APIs that are meant to be guarded +by Android's API Blocklist. + +Local Modifications: +* Removed files related to Gradle. +* Added local_modifications/.../BuildConfig.java to replace what Gradle would + have generated. +* Added BUILD.gn diff --git a/build/android/incremental_install/third_party/AndroidHiddenApiBypass/README.md b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/README.md new file mode 100644 index 000000000000..c7e06817ed87 --- /dev/null +++ b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/README.md @@ -0,0 +1,84 @@ +# AndroidHiddenApiBypass + +[![Android CI status](https://github.com/LSPosed/AndroidHiddenApiBypass/actions/workflows/android.yml/badge.svg?branch=main)](https://github.com/LSPosed/AndroidHiddenApiBypass/actions/workflows/android.yml) + +Bypass restrictions on non-SDK interfaces. + +## Why AndroidHiddenApiBypass? + +- Pure Java: no native code used. +- Reliable: does not rely on specific behaviors, so it will not be blocked like meta-reflection or `dexfile`. +- Stable: `unsafe`, art structs and `setHiddenApiExemptions` are stable APIs. + +[How it works (Chinese)](https://lovesykun.cn/archives/android-hidden-api-bypass.html) + +## Integration + +Gradle: + +```gradle +repositories { + mavenCentral() +} +dependencies { + implementation 'org.lsposed.hiddenapibypass:hiddenapibypass:4.3' +} +``` + +## Usage + +1. Invoke a restricted method: + ```java + HiddenApiBypass.invoke(ApplicationInfo.class, new ApplicationInfo(), "usesNonSdkApi"/*, args*/) + ``` +1. Invoke a restricted constructor: + ```java + Object instance = HiddenApiBypass.newInstance(Class.forName("android.app.IActivityManager$Default")/*, args*/); + ``` +1. Get all methods including restricted ones from a class: + ```java + var allMethods = HiddenApiBypass.getDeclaredMethods(ApplicationInfo.class); + ((Method) allMethods.stream().filter(e -> e.getName().equals("usesNonSdkApi")).findFirst().get()).invoke(new ApplicationInfo()); + ``` +1. Get all non-static fields including restricted ones from a class: + ```java + var allInstanceFields = HiddenApiBypass.getInstanceFields(ApplicationInfo.class); + ((Field) allInstanceFields.stream().filter(e -> e.getName().equals("longVersionCode")).findFirst().get()).get(new ApplicationInfo()); + ``` +1. Get all static fields including restricted ones from a class: + ```java + var allStaticFields = HiddenApiBypass.getStaticFields(ApplicationInfo.class); + ((Field) allStaticFields.stream().filter(e -> e.getName().equals("HIDDEN_API_ENFORCEMENT_DEFAULT")).findFirst().get()).get(null); + ``` +1. Get a specific class method or constructor: + ```java + var ctor = HiddenApiBypass.getDeclaredConstructor(ClipDrawable.class /*, args */); + var method = HiddenApiBypass.getDeclaredMethod(ApplicationInfo.class, "getHiddenApiEnforcementPolicy" /*, args */); + ``` +1.
Add a class to the exemption list: + ```java + HiddenApiBypass.addHiddenApiExemptions( + "Landroid/content/pm/ApplicationInfo;", // one specific class + "Ldalvik/system", // all classes in the dalvik.system package + "Lx" // all classes whose full name starts with x + ); + ``` + If you want to add all classes to the exemption list, just pass an empty prefix: + ```java + HiddenApiBypass.addHiddenApiExemptions(""); + ``` +## License + + Copyright 2021 LSPosed + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/build/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/Helper.java b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/Helper.java new file mode 100644 index 000000000000..07d130dd29cf --- /dev/null +++ b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/Helper.java @@ -0,0 +1,108 @@ +/* + * Copyright (C) 2021 LSPosed + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.lsposed.hiddenapibypass; + +import java.lang.invoke.MethodHandleInfo; +import java.lang.invoke.MethodType; +import java.lang.reflect.Member; + +@SuppressWarnings("unused") +public class Helper { + static public class MethodHandle { + private final MethodType type = null; + private MethodType nominalType; + private MethodHandle cachedSpreadInvoker; + protected final int handleKind = 0; + + // The ArtMethod* or ArtField* associated with this method handle (used by the runtime).
+ protected final long artFieldOrMethod = 0; + } + + static final public class MethodHandleImpl extends MethodHandle { + private final MethodHandleInfo info = null; + } + + static final public class HandleInfo { + private final Member member = null; + private final MethodHandle handle = null; + } + + static final public class Class { + private transient ClassLoader classLoader; + private transient java.lang.Class<?> componentType; + private transient Object dexCache; + private transient Object extData; + private transient Object[] ifTable; + private transient String name; + private transient java.lang.Class<?> superClass; + private transient Object vtable; + private transient long iFields; + private transient long methods; + private transient long sFields; + private transient int accessFlags; + private transient int classFlags; + private transient int classSize; + private transient int clinitThreadId; + private transient int dexClassDefIndex; + private transient volatile int dexTypeIndex; + private transient int numReferenceInstanceFields; + private transient int numReferenceStaticFields; + private transient int objectSize; + private transient int objectSizeAllocFastPath; + private transient int primitiveType; + private transient int referenceInstanceOffsets; + private transient int status; + private transient short copiedMethodsOffset; + private transient short virtualMethodsOffset; + } + + static public class AccessibleObject { + private boolean override; + } + + static final public class Executable extends AccessibleObject { + private Class declaringClass; + private Class declaringClassOfOverriddenMethod; + private Object[] parameters; + private long artMethod; + private int accessFlags; + } + + @SuppressWarnings("EmptyMethod") + public static class NeverCall { + private static void a() { + } + + private static void b() { + } + + private static int s; + private static int t; + private int i; + private int j; + } + + public static class InvokeStub { + private static Object invoke(Object... args) { + throw new IllegalStateException("Failed to invoke the method"); + } + + private InvokeStub(Object... args) { + throw new IllegalStateException("Failed to new a instance"); + } + } +} diff --git a/build/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/HiddenApiBypass.java b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/HiddenApiBypass.java new file mode 100644 index 000000000000..2344acff9e96 --- /dev/null +++ b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/HiddenApiBypass.java @@ -0,0 +1,415 @@ +/* + * Copyright (C) 2021 LSPosed + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.lsposed.hiddenapibypass; + +import android.os.Build; +import android.util.Log; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import androidx.annotation.VisibleForTesting; + +import org.lsposed.hiddenapibypass.library.BuildConfig; + +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandleInfo; +import java.lang.invoke.MethodHandles; +import java.lang.reflect.Constructor; +import java.lang.reflect.Executable; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import dalvik.system.VMRuntime; +import sun.misc.Unsafe; + +@RequiresApi(Build.VERSION_CODES.P) +public final class HiddenApiBypass { + private static final String TAG = "HiddenApiBypass"; + private static final Unsafe unsafe; + private static final long methodOffset; + private static final long classOffset; + private static final long artOffset; + private static final long infoOffset; + private static final long methodsOffset; + private static final long iFieldOffset; + private static final long sFieldOffset; + private static final long memberOffset; + private static final long artMethodSize; + private static final long artMethodBias; + private static final long artFieldSize; + private static final long artFieldBias; + private static final Set<String> signaturePrefixes = new HashSet<>(); + + static { + try { + //noinspection JavaReflectionMemberAccess DiscouragedPrivateApi + unsafe = (Unsafe) Unsafe.class.getDeclaredMethod("getUnsafe").invoke(null); + assert unsafe != null; + methodOffset = unsafe.objectFieldOffset(Helper.Executable.class.getDeclaredField("artMethod")); + classOffset = unsafe.objectFieldOffset(Helper.Executable.class.getDeclaredField("declaringClass")); + artOffset = unsafe.objectFieldOffset(Helper.MethodHandle.class.getDeclaredField("artFieldOrMethod")); + infoOffset = unsafe.objectFieldOffset(Helper.MethodHandleImpl.class.getDeclaredField("info")); + methodsOffset = unsafe.objectFieldOffset(Helper.Class.class.getDeclaredField("methods")); + iFieldOffset = unsafe.objectFieldOffset(Helper.Class.class.getDeclaredField("iFields")); + sFieldOffset = unsafe.objectFieldOffset(Helper.Class.class.getDeclaredField("sFields")); + memberOffset = unsafe.objectFieldOffset(Helper.HandleInfo.class.getDeclaredField("member")); + Method mA = Helper.NeverCall.class.getDeclaredMethod("a"); + Method mB = Helper.NeverCall.class.getDeclaredMethod("b"); + mA.setAccessible(true); + mB.setAccessible(true); + MethodHandle mhA = MethodHandles.lookup().unreflect(mA); + MethodHandle mhB = MethodHandles.lookup().unreflect(mB); + long aAddr = unsafe.getLong(mhA, artOffset); + long bAddr = unsafe.getLong(mhB, artOffset); + long aMethods = unsafe.getLong(Helper.NeverCall.class, methodsOffset); + artMethodSize = bAddr - aAddr; + if (BuildConfig.DEBUG) Log.v(TAG, artMethodSize + " " + + Long.toString(aAddr, 16) + ", " + + Long.toString(bAddr, 16) + ", " + + Long.toString(aMethods, 16)); + artMethodBias = aAddr - aMethods - artMethodSize; + Field fI = Helper.NeverCall.class.getDeclaredField("i"); + Field fJ = Helper.NeverCall.class.getDeclaredField("j"); + fI.setAccessible(true); + fJ.setAccessible(true); + MethodHandle mhI = MethodHandles.lookup().unreflectGetter(fI); + MethodHandle
mhJ = MethodHandles.lookup().unreflectGetter(fJ); + long iAddr = unsafe.getLong(mhI, artOffset); + long jAddr = unsafe.getLong(mhJ, artOffset); + long iFields = unsafe.getLong(Helper.NeverCall.class, iFieldOffset); + artFieldSize = jAddr - iAddr; + if (BuildConfig.DEBUG) Log.v(TAG, artFieldSize + " " + + Long.toString(iAddr, 16) + ", " + + Long.toString(jAddr, 16) + ", " + + Long.toString(iFields, 16)); + artFieldBias = iAddr - iFields; + } catch (ReflectiveOperationException e) { + Log.e(TAG, "Initialize error", e); + throw new ExceptionInInitializerError(e); + } + } + + @VisibleForTesting + static boolean checkArgsForInvokeMethod(Class<?>[] params, Object[] args) { + if (params.length != args.length) return false; + for (int i = 0; i < params.length; ++i) { + if (params[i].isPrimitive()) { + if (params[i] == int.class && !(args[i] instanceof Integer)) return false; + else if (params[i] == byte.class && !(args[i] instanceof Byte)) return false; + else if (params[i] == char.class && !(args[i] instanceof Character)) return false; + else if (params[i] == boolean.class && !(args[i] instanceof Boolean)) return false; + else if (params[i] == double.class && !(args[i] instanceof Double)) return false; + else if (params[i] == float.class && !(args[i] instanceof Float)) return false; + else if (params[i] == long.class && !(args[i] instanceof Long)) return false; + else if (params[i] == short.class && !(args[i] instanceof Short)) return false; + } else if (args[i] != null && !params[i].isInstance(args[i])) return false; + } + return true; + } + + /** + * create an instance of the given class {@code clazz}, calling the restricted constructor with arguments {@code initargs} + * + * @param clazz the class to instantiate + * @param initargs arguments to pass to the constructor + * @return the new instance + * @see Constructor#newInstance(Object...) + */ + public static Object newInstance(@NonNull Class<?> clazz, Object... initargs) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException { + Method stub = Helper.InvokeStub.class.getDeclaredMethod("invoke", Object[].class); + Constructor<?> ctor = Helper.InvokeStub.class.getDeclaredConstructor(Object[].class); + ctor.setAccessible(true); + long methods = unsafe.getLong(clazz, methodsOffset); + if (methods == 0) throw new NoSuchMethodException("Cannot find matching constructor"); + int numMethods = unsafe.getInt(methods); + if (BuildConfig.DEBUG) Log.d(TAG, clazz + " has " + numMethods + " methods"); + for (int i = 0; i < numMethods; i++) { + long method = methods + i * artMethodSize + artMethodBias; + unsafe.putLong(stub, methodOffset, method); + if (BuildConfig.DEBUG) Log.v(TAG, "got " + clazz.getTypeName() + "."
+                    + stub.getName()
+                    + "(" + Arrays.stream(stub.getParameterTypes()).map(Type::getTypeName).collect(Collectors.joining()) + ")");
+            if ("<init>".equals(stub.getName())) {
+                unsafe.putLong(ctor, methodOffset, method);
+                unsafe.putObject(ctor, classOffset, clazz);
+                Class<?>[] params = ctor.getParameterTypes();
+                if (checkArgsForInvokeMethod(params, initargs))
+                    return ctor.newInstance(initargs);
+            }
+        }
+        throw new NoSuchMethodException("Cannot find matching constructor");
+    }
+
+    /**
+     * Invoke a restricted method named {@code methodName} of the given class {@code clazz} on this object {@code thiz} with arguments {@code args}.
+     *
+     * @param clazz      the class to call the method on (required because this method does not look up inherited methods)
+     * @param thiz       this object, which can be {@code null} if the target method is static
+     * @param methodName the method name
+     * @param args       arguments to call the method with name {@code methodName}
+     * @return the return value of the method
+     * @see Method#invoke(Object, Object...)
+     */
+    public static Object invoke(@NonNull Class<?> clazz, @Nullable Object thiz, @NonNull String methodName, Object... args) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
+        if (thiz != null && !clazz.isInstance(thiz)) {
+            throw new IllegalArgumentException("this object is not an instance of the given class");
+        }
+        Method stub = Helper.InvokeStub.class.getDeclaredMethod("invoke", Object[].class);
+        stub.setAccessible(true);
+        long methods = unsafe.getLong(clazz, methodsOffset);
+        if (methods == 0) throw new NoSuchMethodException("Cannot find matching method");
+        int numMethods = unsafe.getInt(methods);
+        if (BuildConfig.DEBUG) Log.d(TAG, clazz + " has " + numMethods + " methods");
+        for (int i = 0; i < numMethods; i++) {
+            long method = methods + i * artMethodSize + artMethodBias;
+            unsafe.putLong(stub, methodOffset, method);
+            if (BuildConfig.DEBUG) Log.v(TAG, "got " + clazz.getTypeName() + "."
+                    + stub.getName()
+                    + "(" + Arrays.stream(stub.getParameterTypes()).map(Type::getTypeName).collect(Collectors.joining()) + ")");
+            if (methodName.equals(stub.getName())) {
+                Class<?>[] params = stub.getParameterTypes();
+                if (checkArgsForInvokeMethod(params, args))
+                    return stub.invoke(thiz, args);
+            }
+        }
+        throw new NoSuchMethodException("Cannot find matching method");
+    }
+
+    /**
+     * Get the declared methods of the given class without hidden API restriction.
+     *
+     * @param clazz the class to fetch declared methods from (including constructors with name `<init>`)
+     * @return list of declared methods of {@code clazz}
+     */
+    @NonNull
+    public static List<Executable> getDeclaredMethods(@NonNull Class<?> clazz) {
+        ArrayList<Executable> list = new ArrayList<>();
+        if (clazz.isPrimitive() || clazz.isArray()) return list;
+        MethodHandle mh;
+        try {
+            Method mA = Helper.NeverCall.class.getDeclaredMethod("a");
+            mA.setAccessible(true);
+            mh = MethodHandles.lookup().unreflect(mA);
+        } catch (NoSuchMethodException | IllegalAccessException e) {
+            return list;
+        }
+        long methods = unsafe.getLong(clazz, methodsOffset);
+        if (methods == 0) return list;
+        int numMethods = unsafe.getInt(methods);
+        if (BuildConfig.DEBUG) Log.d(TAG, clazz + " has " + numMethods + " methods");
+        for (int i = 0; i < numMethods; i++) {
+            long method = methods + i * artMethodSize + artMethodBias;
+            unsafe.putLong(mh, artOffset, method);
+            unsafe.putObject(mh, infoOffset, null);
+            try {
+                MethodHandles.lookup().revealDirect(mh);
+            } catch (Throwable ignored) {
+            }
+            MethodHandleInfo info = (MethodHandleInfo) unsafe.getObject(mh, infoOffset);
+            Executable member = (Executable) unsafe.getObject(info, memberOffset);
+            if (BuildConfig.DEBUG)
+                Log.v(TAG, "got " + clazz.getTypeName() + "." + member.getName() +
+                        "(" + Arrays.stream(member.getParameterTypes()).map(Type::getTypeName).collect(Collectors.joining()) + ")");
+            list.add(member);
+        }
+        return list;
+    }
+
+    /**
+     * Get a restricted method named {@code methodName} of the given class {@code clazz} with argument types {@code parameterTypes}.
+     *
+     * @param clazz          the class where the expected method is declared
+     * @param methodName     the expected method's name
+     * @param parameterTypes argument types of the expected method with name {@code methodName}
+     * @return the found method
+     * @throws NoSuchMethodException when no method matches the given parameters
+     * @see Class#getDeclaredMethod(String, Class[])
+     */
+    @NonNull
+    public static Method getDeclaredMethod(@NonNull Class<?> clazz, @NonNull String methodName, @NonNull Class<?>...
parameterTypes) throws NoSuchMethodException {
+        List<Executable> methods = getDeclaredMethods(clazz);
+        allMethods:
+        for (Executable method : methods) {
+            if (!method.getName().equals(methodName)) continue;
+            if (!(method instanceof Method)) continue;
+            Class<?>[] expectedTypes = method.getParameterTypes();
+            if (expectedTypes.length != parameterTypes.length) continue;
+            for (int i = 0; i < parameterTypes.length; ++i) {
+                if (parameterTypes[i] != expectedTypes[i]) continue allMethods;
+            }
+            return (Method) method;
+        }
+        throw new NoSuchMethodException("Cannot find matching method");
+    }
+
+    /**
+     * Get a restricted constructor of the given class {@code clazz} with argument types {@code parameterTypes}.
+     *
+     * @param clazz          the class where the expected constructor is declared
+     * @param parameterTypes argument types of the expected constructor
+     * @return the found constructor
+     * @throws NoSuchMethodException when no constructor matches the given parameters
+     * @see Class#getDeclaredConstructor(Class[])
+     */
+    @NonNull
+    public static Constructor<?> getDeclaredConstructor(@NonNull Class<?> clazz, @NonNull Class<?>... parameterTypes) throws NoSuchMethodException {
+        List<Executable> methods = getDeclaredMethods(clazz);
+        allMethods:
+        for (Executable method : methods) {
+            if (!(method instanceof Constructor)) continue;
+            Class<?>[] expectedTypes = method.getParameterTypes();
+            if (expectedTypes.length != parameterTypes.length) continue;
+            for (int i = 0; i < parameterTypes.length; ++i) {
+                if (parameterTypes[i] != expectedTypes[i]) continue allMethods;
+            }
+            return (Constructor<?>) method;
+        }
+        throw new NoSuchMethodException("Cannot find matching constructor");
+    }
+
+    /**
+     * Get the declared non-static fields of the given class without hidden API restriction.
+     *
+     * @param clazz the class to fetch declared fields from
+     * @return list of declared non-static fields of {@code clazz}
+     */
+    @NonNull
+    public static List<Field> getInstanceFields(@NonNull Class<?> clazz) {
+        ArrayList<Field> list = new ArrayList<>();
+        if (clazz.isPrimitive() || clazz.isArray()) return list;
+        MethodHandle mh;
+        try {
+            Field fI = Helper.NeverCall.class.getDeclaredField("i");
+            fI.setAccessible(true);
+            mh = MethodHandles.lookup().unreflectGetter(fI);
+        } catch (IllegalAccessException | NoSuchFieldException e) {
+            return list;
+        }
+        long fields = unsafe.getLong(clazz, iFieldOffset);
+        if (fields == 0) return list;
+        int numFields = unsafe.getInt(fields);
+        if (BuildConfig.DEBUG) Log.d(TAG, clazz + " has " + numFields + " instance fields");
+        for (int i = 0; i < numFields; i++) {
+            long field = fields + i * artFieldSize + artFieldBias;
+            unsafe.putLong(mh, artOffset, field);
+            unsafe.putObject(mh, infoOffset, null);
+            try {
+                MethodHandles.lookup().revealDirect(mh);
+            } catch (Throwable ignored) {
+            }
+            MethodHandleInfo info = (MethodHandleInfo) unsafe.getObject(mh, infoOffset);
+            Field member = (Field) unsafe.getObject(info, memberOffset);
+            if (BuildConfig.DEBUG)
+                Log.v(TAG, "got " + member.getType() + " " + clazz.getTypeName() + "."
+                        + member.getName());
+            list.add(member);
+        }
+        return list;
+    }
+
+    /**
+     * Get the declared static fields of the given class without hidden API restriction.
+     *
+     * @param clazz the class to fetch declared fields from
+     * @return list of declared static fields of {@code clazz}
+     */
+    @NonNull
+    public static List<Field> getStaticFields(@NonNull Class<?> clazz) {
+        ArrayList<Field> list = new ArrayList<>();
+        if (clazz.isPrimitive() || clazz.isArray()) return list;
+        MethodHandle mh;
+        try {
+            Field fS = Helper.NeverCall.class.getDeclaredField("s");
+            fS.setAccessible(true);
+            mh = MethodHandles.lookup().unreflectGetter(fS);
+        } catch (IllegalAccessException | NoSuchFieldException e) {
+            return list;
+        }
+        long fields = unsafe.getLong(clazz, sFieldOffset);
+        if (fields == 0) return list;
+        int numFields = unsafe.getInt(fields);
+        if (BuildConfig.DEBUG) Log.d(TAG, clazz + " has " + numFields + " static fields");
+        for (int i = 0; i < numFields; i++) {
+            long field = fields + i * artFieldSize + artFieldBias;
+            unsafe.putLong(mh, artOffset, field);
+            unsafe.putObject(mh, infoOffset, null);
+            try {
+                MethodHandles.lookup().revealDirect(mh);
+            } catch (Throwable ignored) {
+            }
+            MethodHandleInfo info = (MethodHandleInfo) unsafe.getObject(mh, infoOffset);
+            Field member = (Field) unsafe.getObject(info, memberOffset);
+            if (BuildConfig.DEBUG)
+                Log.v(TAG, "got " + member.getType() + " " + clazz.getTypeName() + "." + member.getName());
+            list.add(member);
+        }
+        return list;
+    }
+
+    /**
+     * Sets the list of exemptions from hidden API access enforcement.
+     *
+     * @param signaturePrefixes A list of class signature prefixes. Each item in the list is a prefix match on the type
+     *                          signature of a blacklisted API. All matching APIs are treated as if they were on
+     *                          the whitelist: access permitted, and no logging.
+     * @return whether the operation is successful
+     */
+    public static boolean setHiddenApiExemptions(@NonNull String... signaturePrefixes) {
+        try {
+            Object runtime = invoke(VMRuntime.class, null, "getRuntime");
+            invoke(VMRuntime.class, runtime, "setHiddenApiExemptions", (Object) signaturePrefixes);
+            return true;
+        } catch (Throwable e) {
+            Log.w(TAG, "setHiddenApiExemptions", e);
+            return false;
+        }
+    }
+
+    /**
+     * Adds to the list of exemptions from hidden API access enforcement.
+     *
+     * @param signaturePrefixes A list of class signature prefixes. Each item in the list is a prefix match on the type
+     *                          signature of a blacklisted API. All matching APIs are treated as if they were on
+     *                          the whitelist: access permitted, and no logging.
+     * @return whether the operation is successful
+     */
+    public static boolean addHiddenApiExemptions(String... signaturePrefixes) {
+        HiddenApiBypass.signaturePrefixes.addAll(Arrays.asList(signaturePrefixes));
+        String[] strings = new String[HiddenApiBypass.signaturePrefixes.size()];
+        HiddenApiBypass.signaturePrefixes.toArray(strings);
+        return setHiddenApiExemptions(strings);
+    }
+
+    /**
+     * Clear the list of exemptions from hidden API access enforcement.
+     * Android runtime will cache access flags, so if a hidden API has been accessed unrestrictedly,
+     * running this method will not restore the restriction on it.
+ * + * @return whether the operation is successful + */ + public static boolean clearHiddenApiExemptions() { + HiddenApiBypass.signaturePrefixes.clear(); + return setHiddenApiExemptions(); + } +} diff --git a/build/android/incremental_install/third_party/AndroidHiddenApiBypass/local_modifications/org/lsposed/hiddenapibypass/library/BuildConfig.java b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/local_modifications/org/lsposed/hiddenapibypass/library/BuildConfig.java new file mode 100644 index 000000000000..9788a8edcbff --- /dev/null +++ b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/local_modifications/org/lsposed/hiddenapibypass/library/BuildConfig.java @@ -0,0 +1,9 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +package org.lsposed.hiddenapibypass.library; + +/** When building with Gradle, this file would be generated. */ +public class BuildConfig { + public static final boolean DEBUG = false; +} diff --git a/build/android/incremental_install/third_party/AndroidHiddenApiBypass/stub/src/main/java/dalvik/system/VMRuntime.java b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/stub/src/main/java/dalvik/system/VMRuntime.java new file mode 100644 index 000000000000..87db1ece1b61 --- /dev/null +++ b/build/android/incremental_install/third_party/AndroidHiddenApiBypass/stub/src/main/java/dalvik/system/VMRuntime.java @@ -0,0 +1,9 @@ +package dalvik.system; + +@SuppressWarnings("unused") +public class VMRuntime { + public static VMRuntime getRuntime() { + throw new IllegalArgumentException("stub"); + } + public native void setHiddenApiExemptions(String[] signaturePrefixes); +} diff --git a/build/android/incremental_install/write_installer_json.py b/build/android/incremental_install/write_installer_json.py index cf1d2d4c57a1..4825a80e1afc 100755 --- a/build/android/incremental_install/write_installer_json.py +++ b/build/android/incremental_install/write_installer_json.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -14,6 +14,7 @@ sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp')) from util import build_utils +import action_helpers # build_utils adds //build to sys.path. 
def _ParseArgs(args): @@ -44,8 +45,8 @@ def _ParseArgs(args): help='Print a warning about proguard being disabled') options = parser.parse_args(args) - options.dex_files = build_utils.ParseGnList(options.dex_files) - options.native_libs = build_utils.ParseGnList(options.native_libs) + options.dex_files = action_helpers.parse_gn_list(options.dex_files) + options.native_libs = action_helpers.parse_gn_list(options.native_libs) return options @@ -60,7 +61,7 @@ def main(args): 'split_globs': options.split_globs, } - with build_utils.AtomicOutput(options.output_path, mode='w+') as f: + with action_helpers.atomic_output(options.output_path, mode='w+') as f: json.dump(data, f, indent=2, sort_keys=True) diff --git a/build/android/incremental_install/write_installer_json.pydeps b/build/android/incremental_install/write_installer_json.pydeps index 11a263f4a82d..519281fe6fc2 100644 --- a/build/android/incremental_install/write_installer_json.pydeps +++ b/build/android/incremental_install/write_installer_json.pydeps @@ -1,5 +1,6 @@ # Generated by running: # build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/write_installer_json.pydeps build/android/incremental_install/write_installer_json.py +../../action_helpers.py ../../gn_helpers.py ../gyp/util/__init__.py ../gyp/util/build_utils.py diff --git a/build/android/java/src/org/chromium/build/annotations/AlwaysInline.java b/build/android/java/src/org/chromium/build/annotations/AlwaysInline.java new file mode 100644 index 000000000000..e79bfe77abdd --- /dev/null +++ b/build/android/java/src/org/chromium/build/annotations/AlwaysInline.java @@ -0,0 +1,17 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Tells R8 to always inline the annotated method/constructor. + */ +@Target({ElementType.CONSTRUCTOR, ElementType.METHOD}) +@Retention(RetentionPolicy.CLASS) +public @interface AlwaysInline {} diff --git a/build/android/java/src/org/chromium/build/annotations/CheckDiscard.java b/build/android/java/src/org/chromium/build/annotations/CheckDiscard.java new file mode 100644 index 000000000000..897067e2ee96 --- /dev/null +++ b/build/android/java/src/org/chromium/build/annotations/CheckDiscard.java @@ -0,0 +1,24 @@ +// Copyright 2019 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Causes build to assert that annotated classes / methods / fields are + * optimized away in release builds (without dcheck_always_on). + */ +@Target({ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE}) +@Retention(RetentionPolicy.CLASS) +public @interface CheckDiscard { + /** + * Describes why the element should be discarded. + * @return reason for discarding (crbug links are preferred unless reason is trivial). 
+ */ + String value(); +} diff --git a/build/android/java/src/org/chromium/build/annotations/DoNotClassMerge.java b/build/android/java/src/org/chromium/build/annotations/DoNotClassMerge.java new file mode 100644 index 000000000000..94c9fa3a690c --- /dev/null +++ b/build/android/java/src/org/chromium/build/annotations/DoNotClassMerge.java @@ -0,0 +1,20 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * The annotated class should never be horizontally or vertically merged. + * + * The annotated classes are guaranteed not to be horizontally or vertically + * merged by Proguard. Other optimizations may still apply. + */ +@Target({ElementType.TYPE}) +@Retention(RetentionPolicy.CLASS) +public @interface DoNotClassMerge {} diff --git a/build/android/java/src/org/chromium/build/annotations/DoNotInline.java b/build/android/java/src/org/chromium/build/annotations/DoNotInline.java new file mode 100644 index 000000000000..4dd193332da2 --- /dev/null +++ b/build/android/java/src/org/chromium/build/annotations/DoNotInline.java @@ -0,0 +1,20 @@ +// Copyright 2018 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * The annotated method or class should never be inlined. + * + * The annotated method (or methods on the annotated class) are guaranteed not to be inlined by + * Proguard. Other optimizations may still apply. + */ +@Target({ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE}) +@Retention(RetentionPolicy.CLASS) +public @interface DoNotInline {} diff --git a/build/android/java/src/org/chromium/build/annotations/DoNotStripLogs.java b/build/android/java/src/org/chromium/build/annotations/DoNotStripLogs.java new file mode 100644 index 000000000000..be96d9a590e2 --- /dev/null +++ b/build/android/java/src/org/chromium/build/annotations/DoNotStripLogs.java @@ -0,0 +1,17 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * The annotated method or class will have -maximumremovedandroidloglevel 0 applied to it. + */ +@Target({ElementType.CONSTRUCTOR, ElementType.METHOD, ElementType.TYPE}) +@Retention(RetentionPolicy.CLASS) +public @interface DoNotStripLogs {} diff --git a/build/android/java/src/org/chromium/build/annotations/IdentifierNameString.java b/build/android/java/src/org/chromium/build/annotations/IdentifierNameString.java new file mode 100644 index 000000000000..ca8b2df67655 --- /dev/null +++ b/build/android/java/src/org/chromium/build/annotations/IdentifierNameString.java @@ -0,0 +1,35 @@ +// Copyright 2020 The Chromium Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Annotation used to mark fields that may contain Strings referring to fully qualified class names
+ * and methods whose arguments may be fully qualified class names. These classes may then be
+ * obfuscated by R8. A couple of caveats when using this:
+ * - This only obfuscates the string; it does not actually check that the class exists.
+ * - If a field has this annotation, it must be non-final, otherwise javac will inline the constant
+ *   and R8 won't obfuscate it.
+ * - Any field/method must be assigned/called with a String literal or a variable R8 can easily
+ *   trace to a String literal.
+ *
+ * Usage example:
+ * {@code
+ * @IdentifierNameString
+ * public static final String LOGGING_TAG = "com.google.android.apps.foo.FooActivity";
+ *
+ * // In this example, both className and message are treated as identifier name strings, but will
+ * // only be obfuscated if the string points to a real class.
+ * @IdentifierNameString
+ * public void doSomeLogging(String className, String message) { ... }
+ * }
+ */
+@Target({ElementType.FIELD, ElementType.METHOD})
+@Retention(RetentionPolicy.CLASS)
+public @interface IdentifierNameString {}
diff --git a/build/android/java/src/org/chromium/build/annotations/MainDex.java b/build/android/java/src/org/chromium/build/annotations/MainDex.java
new file mode 100644
index 000000000000..5eedb0b778ce
--- /dev/null
+++ b/build/android/java/src/org/chromium/build/annotations/MainDex.java
@@ -0,0 +1,23 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Classes with native methods (contain @NativeMethods interfaces) that are used within renderer
+ * processes must be annotated with @MainDex in order for their native methods to work.
+ *
+ * Applies only for Chrome/ChromeModern (not needed for Monochrome+).
+ *
+ * For Cronet builds, which use a default_min_sdk_version of less than 21, this annotation also
+ * causes classes to appear in the main dex file (for "Legacy multidex").
+ */
+@Target(ElementType.TYPE)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface MainDex {}
diff --git a/build/android/java/src/org/chromium/build/annotations/MockedInTests.java b/build/android/java/src/org/chromium/build/annotations/MockedInTests.java
new file mode 100644
index 000000000000..6b486f7becd5
--- /dev/null
+++ b/build/android/java/src/org/chromium/build/annotations/MockedInTests.java
@@ -0,0 +1,17 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+/**
+ * See b/147584922. Proguard and Mockito don't play nicely together, and proguard rules make it
+ * impossible to keep the base class/interface for a mocked class without providing additional
+ * explicit information, like this annotation. This annotation should only need to be used on a
+ * class/interface that is extended/implemented by another class/interface that is then mocked.
+ */
+@Target(ElementType.TYPE)
+public @interface MockedInTests {}
diff --git a/build/android/java/src/org/chromium/build/annotations/UsedByReflection.java b/build/android/java/src/org/chromium/build/annotations/UsedByReflection.java
new file mode 100644
index 000000000000..f28f38348e71
--- /dev/null
+++ b/build/android/java/src/org/chromium/build/annotations/UsedByReflection.java
@@ -0,0 +1,22 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+/**
+ * Annotation used for marking methods and fields that are called by reflection.
+ * Useful for keeping components that would otherwise be removed by Proguard. + * Use the value parameter to mention a file that calls this method. + * + * Note that adding this annotation to a method is not enough to guarantee that + * it is kept - either its class must be referenced elsewhere in the program, or + * the class must be annotated with this as well. + */ +@Target({ElementType.METHOD, ElementType.FIELD, ElementType.TYPE, ElementType.CONSTRUCTOR}) +public @interface UsedByReflection { + String value(); +} diff --git a/build/android/java/templates/BuildConfig.template b/build/android/java/templates/BuildConfig.template index 8953ad5ca177..cfecb6fefdcb 100644 --- a/build/android/java/templates/BuildConfig.template +++ b/build/android/java/templates/BuildConfig.template @@ -1,4 +1,4 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -81,15 +81,15 @@ public class BuildConfig { public static MAYBE_FINAL boolean IS_INCREMENTAL_INSTALL MAYBE_FALSE; #endif -#if defined(_IS_CHROMECAST_BRANDING_INTERNAL) - public static MAYBE_FINAL boolean IS_CHROMECAST_BRANDING_INTERNAL = true; -#else - public static MAYBE_FINAL boolean IS_CHROMECAST_BRANDING_INTERNAL MAYBE_FALSE; -#endif - #if defined(_ISOLATED_SPLITS_ENABLED) public static MAYBE_FINAL boolean ISOLATED_SPLITS_ENABLED = true; #else public static MAYBE_FINAL boolean ISOLATED_SPLITS_ENABLED MAYBE_FALSE; #endif + +#if defined(_IS_FOR_TEST) + public static MAYBE_FINAL boolean IS_FOR_TEST = true; +#else + public static MAYBE_FINAL boolean IS_FOR_TEST MAYBE_FALSE; +#endif } diff --git a/build/android/java/templates/ProductConfig.template b/build/android/java/templates/ProductConfig.template index 4bc0d5296bb1..d6e1236bef54 100644 --- a/build/android/java/templates/ProductConfig.template +++ b/build/android/java/templates/ProductConfig.template @@ -1,4 +1,4 @@ -// Copyright 2019 The Chromium Authors. All rights reserved. +// Copyright 2019 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -7,12 +7,10 @@ package PACKAGE; #if defined(USE_FINAL) #define MAYBE_FINAL final #define MAYBE_USE_CHROMIUM_LINKER = USE_CHROMIUM_LINKER_VALUE -#define MAYBE_USE_MODERN_LINKER = USE_MODERN_LINKER_VALUE #define MAYBE_IS_BUNDLE = IS_BUNDLE_VALUE #else #define MAYBE_FINAL #define MAYBE_USE_CHROMIUM_LINKER -#define MAYBE_USE_MODERN_LINKER #define MAYBE_IS_BUNDLE #endif @@ -29,6 +27,5 @@ public class ProductConfig { #endif public static MAYBE_FINAL boolean USE_CHROMIUM_LINKER MAYBE_USE_CHROMIUM_LINKER; - public static MAYBE_FINAL boolean USE_MODERN_LINKER MAYBE_USE_MODERN_LINKER; public static MAYBE_FINAL boolean IS_BUNDLE MAYBE_IS_BUNDLE; } diff --git a/build/android/java/test/DefaultLocaleLintTest.java b/build/android/java/test/DefaultLocaleLintTest.java index 21934299de4b..76f9ea53681d 100644 --- a/build/android/java/test/DefaultLocaleLintTest.java +++ b/build/android/java/test/DefaultLocaleLintTest.java @@ -1,4 +1,4 @@ -// Copyright 2021 The Chromium Authors. All rights reserved. +// Copyright 2021 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
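A minimal usage sketch for the @UsedByReflection annotation added above; FooProvider and FooLoader.java are hypothetical names used only for illustration and are not part of this patch:

import org.chromium.build.annotations.UsedByReflection;

public class FooProvider {
    // Hypothetical example: kept by the reflection keep rules even though
    // nothing references this constructor directly. The annotation's value
    // records the file (FooLoader.java) that instantiates it via reflection.
    @UsedByReflection("FooLoader.java")
    public FooProvider() {}
}

As the javadoc above cautions, annotating a member alone does not guarantee it is kept; the class must also be referenced elsewhere in the program or carry the annotation itself.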
diff --git a/build/android/java/test/NewApiLintTest.java b/build/android/java/test/NewApiLintTest.java index 6c68dd8b9adc..66d576a436c7 100644 --- a/build/android/java/test/NewApiLintTest.java +++ b/build/android/java/test/NewApiLintTest.java @@ -1,4 +1,4 @@ -// Copyright 2021 The Chromium Authors. All rights reserved. +// Copyright 2021 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/java/test/NoSignatureChangeIncrementalJavacTestHelper.template b/build/android/java/test/NoSignatureChangeIncrementalJavacTestHelper.template new file mode 100644 index 000000000000..b51a67dc539a --- /dev/null +++ b/build/android/java/test/NoSignatureChangeIncrementalJavacTestHelper.template @@ -0,0 +1,18 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test; + +public class NoSignatureChangeIncrementalJavacTestHelper { + private NoSignatureChangeIncrementalJavacTestHelper2 mHelper2 = + new NoSignatureChangeIncrementalJavacTestHelper2(); + + public String foo() { + return "{{foo_return_value}}"; + } + + public String bar() { + return mHelper2.bar(); + } +} diff --git a/build/android/java/test/NoSignatureChangeIncrementalJavacTestHelper2.java b/build/android/java/test/NoSignatureChangeIncrementalJavacTestHelper2.java new file mode 100644 index 000000000000..9694f3f1cdc5 --- /dev/null +++ b/build/android/java/test/NoSignatureChangeIncrementalJavacTestHelper2.java @@ -0,0 +1,11 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test; + +public class NoSignatureChangeIncrementalJavacTestHelper2 { + public String bar() { + return "bar"; + } +} diff --git a/build/android/java/test/missing_symbol/B.java b/build/android/java/test/missing_symbol/B.java new file mode 100644 index 000000000000..639a744b33ca --- /dev/null +++ b/build/android/java/test/missing_symbol/B.java @@ -0,0 +1,9 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test.missing_symbol; + +public class B { + public void foo() {} +} diff --git a/build/android/java/test/missing_symbol/D.template b/build/android/java/test/missing_symbol/D.template new file mode 100644 index 000000000000..3f7eef3d1eab --- /dev/null +++ b/build/android/java/test/missing_symbol/D.template @@ -0,0 +1,9 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test.missing_symbol; + +public class D { + public void foo() {} +} diff --git a/build/android/java/test/missing_symbol/Importer.template b/build/android/java/test/missing_symbol/Importer.template new file mode 100644 index 000000000000..a1fd881316b4 --- /dev/null +++ b/build/android/java/test/missing_symbol/Importer.template @@ -0,0 +1,13 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +package _IMPORTER_PACKAGE; + +import _IMPORTEE_PACKAGE._IMPORTEE_CLASS_NAME; + +public class Importer { + public Importer() { + new _IMPORTEE_CLASS_NAME().foo(); + } +} diff --git a/build/android/java/test/missing_symbol/ImportsSubB.java b/build/android/java/test/missing_symbol/ImportsSubB.java new file mode 100644 index 000000000000..2422b4add60a --- /dev/null +++ b/build/android/java/test/missing_symbol/ImportsSubB.java @@ -0,0 +1,13 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test.missing_symbol; + +import test.missing_symbol.sub.SubB; + +public class ImportsSubB { + public ImportsSubB() { + new SubB().foo(); + } +} diff --git a/build/android/java/test/missing_symbol/c.jar b/build/android/java/test/missing_symbol/c.jar new file mode 100644 index 0000000000000000000000000000000000000000..5f30be80a773500fa5c009ffd5fcc8b2bc26acff GIT binary patch literal 393 zcmWIWW@Zs#U|`^2kes3+d1pz9dEOlb_U|KakpCkRY^c z=ZPZyiIHlTGv?o&6mWE&|BESd`h1s8M7O6Go-k}UCO>6iFgG*i)q*DuEm)i-M{wOn(*m!^#i;andF#pg^2_(WFTNcBZ$TuLadMwLJOS$Z&o&t ORz@I91kyr`ARYi{lZCDT literal 0 HcmV?d00001 diff --git a/build/android/java/test/missing_symbol/sub/BInMethodSignature.java b/build/android/java/test/missing_symbol/sub/BInMethodSignature.java new file mode 100644 index 000000000000..36b6ba253fa3 --- /dev/null +++ b/build/android/java/test/missing_symbol/sub/BInMethodSignature.java @@ -0,0 +1,13 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test.missing_symbol.sub; + +import test.missing_symbol.B; + +public class BInMethodSignature { + public B foo() { + return new B(); + } +} diff --git a/build/android/java/test/missing_symbol/sub/SubB.java b/build/android/java/test/missing_symbol/sub/SubB.java new file mode 100644 index 000000000000..1e583786417c --- /dev/null +++ b/build/android/java/test/missing_symbol/sub/SubB.java @@ -0,0 +1,9 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test.missing_symbol.sub; + +public class SubB { + public void foo() {} +} diff --git a/build/android/junit/AndroidManifest_mergetest.xml b/build/android/junit/AndroidManifest_mergetest.xml new file mode 100644 index 000000000000..2541b8d7c6de --- /dev/null +++ b/build/android/junit/AndroidManifest_mergetest.xml @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/build/android/junit/res/values/strings.xml b/build/android/junit/res/values/strings.xml new file mode 100644 index 000000000000..9b9c0787b8f8 --- /dev/null +++ b/build/android/junit/res/values/strings.xml @@ -0,0 +1,8 @@ + + + + Hello World + diff --git a/build/android/junit/src/org/chromium/build/AndroidAssetsTest.java b/build/android/junit/src/org/chromium/build/AndroidAssetsTest.java new file mode 100644 index 000000000000..8ff149e90965 --- /dev/null +++ b/build/android/junit/src/org/chromium/build/AndroidAssetsTest.java @@ -0,0 +1,58 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +package org.chromium.build; + +import android.content.Context; +import android.content.pm.ApplicationInfo; +import android.content.pm.PackageManager; +import android.content.pm.PackageManager.NameNotFoundException; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.robolectric.RobolectricTestRunner; +import org.robolectric.RuntimeEnvironment; + +import java.io.IOException; +import java.io.InputStream; + +/** + * Checks that Robolectric tests can use android assets. + */ +@RunWith(RobolectricTestRunner.class) +public class AndroidAssetsTest { + private static final String TEST_ASSET_NAME = "AndroidAssetsTest.java"; + + public String readTestAsset() throws IOException { + try (InputStream stream = + RuntimeEnvironment.getApplication().getAssets().open(TEST_ASSET_NAME)) { + byte[] buffer = new byte[stream.available()]; + stream.read(buffer); + return new String(buffer); + } + } + + @Test + public void testAssetsExist() throws IOException { + String myselfAsAssetData = readTestAsset(); + Assert.assertTrue("asset not correct. It had length=" + myselfAsAssetData.length(), + myselfAsAssetData.contains("String myselfAsAssetData = ")); + } + + @Test + public void testResourcesExist() { + String actual = RuntimeEnvironment.getApplication().getString(R.string.test_string); + Assert.assertEquals("Hello World", actual); + } + + @Test + public void testManifestMerged() throws NameNotFoundException { + Context context = RuntimeEnvironment.getApplication(); + ApplicationInfo info = context.getPackageManager().getApplicationInfo( + context.getPackageName(), PackageManager.GET_META_DATA); + String actual = info.metaData.getString("test-metadata"); + Assert.assertEquals("Hello World", actual); + } +} diff --git a/build/android/junit/src/org/chromium/build/IncrementalJavacTest.java b/build/android/junit/src/org/chromium/build/IncrementalJavacTest.java new file mode 100644 index 000000000000..b15b7df26f4b --- /dev/null +++ b/build/android/junit/src/org/chromium/build/IncrementalJavacTest.java @@ -0,0 +1,33 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.robolectric.RobolectricTestRunner; +import org.robolectric.annotation.Config; + +import test.NoSignatureChangeIncrementalJavacTestHelper; + +/** + * Checks that build picked up changes to + * {@link NoSignatureChangeIncrementalJavacTestHelper#foo()}. + */ +@RunWith(RobolectricTestRunner.class) +@Config(manifest = Config.NONE) +public final class IncrementalJavacTest { + @Test + public void testNoSignatureChange() { + NoSignatureChangeIncrementalJavacTestHelper helper = + new NoSignatureChangeIncrementalJavacTestHelper(); + // #foo() should return updated value. + assertEquals("foo2", helper.foo()); + + // #bar() should not crash. + assertEquals("bar", helper.bar()); + } +} diff --git a/build/android/lighttpd_server.py b/build/android/lighttpd_server.py index 42fbcdbe6925..9950253a6b8c 100755 --- a/build/android/lighttpd_server.py +++ b/build/android/lighttpd_server.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -10,11 +10,9 @@ lighttpd_server PATH_TO_DOC_ROOT """ -from __future__ import print_function import codecs import contextlib -import httplib import os import random import shutil @@ -24,10 +22,14 @@ import tempfile import time +from six.moves import http_client +from six.moves import input # pylint: disable=redefined-builtin + from pylib import constants from pylib import pexpect -class LighttpdServer(object): + +class LighttpdServer: """Wraps lighttpd server, providing robust startup. Args: @@ -122,11 +124,12 @@ def ShutdownHttpServer(self): def _TestServerConnection(self): # Wait for server to start server_msg = '' - for timeout in xrange(1, 5): + for timeout in range(1, 5): client_error = None try: - with contextlib.closing(httplib.HTTPConnection( - '127.0.0.1', self.port, timeout=timeout)) as http: + with contextlib.closing( + http_client.HTTPConnection('127.0.0.1', self.port, + timeout=timeout)) as http: http.set_debuglevel(timeout > 3) http.request('HEAD', '/') r = http.getresponse() @@ -137,7 +140,7 @@ def _TestServerConnection(self): client_error = ('Bad response: %s %s version %s\n ' % (r.status, r.reason, r.version) + '\n '.join([': '.join(h) for h in r.getheaders()])) - except (httplib.HTTPException, socket.error) as client_error: + except (http_client.HTTPException, socket.error) as client_error: pass # Probably too quick connecting: try again # Check for server startup error messages # pylint: disable=no-member @@ -248,8 +251,8 @@ def main(argv): server = LighttpdServer(*argv[1:]) try: if server.StartupHttpServer(): - raw_input('Server running at http://127.0.0.1:%s -' - ' press Enter to exit it.' % server.port) + input('Server running at http://127.0.0.1:%s -' + ' press Enter to exit it.' % server.port) else: print('Server exit code:', server.process.exitstatus) finally: diff --git a/build/android/list_class_verification_failures.py b/build/android/list_class_verification_failures.py index 508e8312270d..9c94e308aa61 100755 --- a/build/android/list_class_verification_failures.py +++ b/build/android/list_class_verification_failures.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2018 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,10 +9,10 @@ and accommodating API-level-specific details, such as file paths. """ -from __future__ import print_function + import argparse -import exceptions +import dataclasses # pylint: disable=wrong-import-order import logging import os import re @@ -63,12 +63,10 @@ def DetermineDeviceToUse(devices): class DeviceOSError(Exception): """Raised when a file is missing from the device, or something similar.""" - pass class UnsupportedDeviceError(Exception): """Raised when the device is not supported by this script.""" - pass def _GetFormattedArch(device): @@ -77,63 +75,61 @@ def _GetFormattedArch(device): return {abis.ARM_64: 'arm64', abis.ARM: 'arm'}.get(abi, abi) -def PathToDexForPlatformVersion(device, package_name): - """Gets the full path to the dex file on the device.""" +def FindOdexFiles(device, package_name): + """Gets the full paths to the dex files on the device.""" sdk_level = device.build_version_sdk paths_to_apk = device.GetApplicationPaths(package_name) if not paths_to_apk: raise DeviceOSError( 'Could not find data directory for {}. 
Is it installed?'.format( package_name)) - if len(paths_to_apk) != 1: - raise DeviceOSError( - 'Expected exactly one path for {} but found {}'.format( - package_name, - paths_to_apk)) - path_to_apk = paths_to_apk[0] - - if version_codes.LOLLIPOP <= sdk_level <= version_codes.LOLLIPOP_MR1: - # Of the form "com.example.foo-\d", where \d is some digit (usually 1 or 2) - package_with_suffix = os.path.basename(os.path.dirname(path_to_apk)) - arch = _GetFormattedArch(device) - dalvik_prefix = '/data/dalvik-cache/{arch}'.format(arch=arch) - odex_file = '{prefix}/data@app@{package}@base.apk@classes.dex'.format( - prefix=dalvik_prefix, - package=package_with_suffix) - elif sdk_level >= version_codes.MARSHMALLOW: - arch = _GetFormattedArch(device) - odex_file = '{data_dir}/oat/{arch}/base.odex'.format( - data_dir=os.path.dirname(path_to_apk), arch=arch) - else: - raise UnsupportedDeviceError('Unsupported API level: {}'.format(sdk_level)) - odex_file_exists = device.FileExists(odex_file) - if odex_file_exists: - return odex_file - elif sdk_level >= version_codes.PIE: - raise DeviceOSError( - 'Unable to find odex file: you must run dex2oat on debuggable apps ' - 'on >= P after installation.') - raise DeviceOSError('Unable to find odex file ' + odex_file) + ret = [] + for path_to_apk in paths_to_apk: + if version_codes.LOLLIPOP <= sdk_level <= version_codes.LOLLIPOP_MR1: + # Of the form "com.example.foo-\d", where \d is a digit (usually 1 or 2). + package_with_suffix = os.path.basename(os.path.dirname(path_to_apk)) + arch = _GetFormattedArch(device) + dalvik_prefix = '/data/dalvik-cache/{arch}'.format(arch=arch) + odex_file = '{prefix}/data@app@{package}@base.apk@classes.dex'.format( + prefix=dalvik_prefix, package=package_with_suffix) + elif sdk_level >= version_codes.MARSHMALLOW: + arch = _GetFormattedArch(device) + odex_file = '{data_dir}/oat/{arch}/base.odex'.format( + data_dir=os.path.dirname(path_to_apk), arch=arch) + else: + raise UnsupportedDeviceError( + 'Unsupported API level: {}'.format(sdk_level)) + + odex_file_exists = device.FileExists(odex_file) + if odex_file_exists: + ret.append(odex_file) + elif sdk_level >= version_codes.PIE: + raise DeviceOSError( + 'Unable to find odex file: you must run dex2oat on debuggable apps ' + 'on >= P after installation.') + else: + raise DeviceOSError('Unable to find odex file ' + odex_file) + return ret -def _AdbOatDumpForPackage(device, package_name, out_file): +def _AdbOatDump(device, odex_file, out_file): """Runs oatdump on the device.""" # Get the path to the odex file. 
- odex_file = PathToDexForPlatformVersion(device, package_name) - device.RunShellCommand( - ['oatdump', '--oat-file=' + odex_file, '--output=' + out_file], - timeout=420, - shell=True, - check_return=True) + with device_temp_file.DeviceTempFile(device.adb) as device_file: + device.RunShellCommand( + ['oatdump', '--oat-file=' + odex_file, '--output=' + device_file.name], + timeout=420, + shell=True, + check_return=True) + device.PullFile(device_file.name, out_file, timeout=220) -class JavaClass(object): +@dataclasses.dataclass(order=True, frozen=True) +class JavaClass: """This represents a Java Class and its ART Class Verification status.""" - - def __init__(self, name, verification_status): - self.name = name - self.verification_status = verification_status + name: str + verification_status: str def _ParseMappingFile(proguard_map_file): @@ -158,11 +154,10 @@ def FormatJavaClassName(dex_code_name, proguard_mappings): obfuscated_name = dex_code_name.replace('/', '.') if proguard_mappings is not None: return _DeobfuscateJavaClassName(obfuscated_name, proguard_mappings) - else: - return obfuscated_name + return obfuscated_name -def ListClassesAndVerificationStatus(oatdump_output, proguard_mappings): +def ParseOatdump(oatdump_output, proguard_mappings): """Lists all Java classes in the dex along with verification status.""" java_classes = [] pattern = re.compile(r'\d+: L([^;]+).*\(type_idx=[^(]+\((\w+)\).*') @@ -189,10 +184,9 @@ def _PrintVerificationResults(target_status, java_classes, show_summary): if java_class.verification_status == target_status: print(java_class.name) if java_class.verification_status not in d: - raise exceptions.RuntimeError('Unexpected status: {0}'.format( + raise RuntimeError('Unexpected status: {0}'.format( java_class.verification_status)) - else: - d[java_class.verification_status] += 1 + d[java_class.verification_status] += 1 if show_summary: for status in d: @@ -205,18 +199,20 @@ def _PrintVerificationResults(target_status, java_classes, show_summary): def RealMain(mapping, device_arg, package, status, hide_summary, workdir): if mapping is None: - logging.warn('Skipping deobfuscation because no map file was provided.') + logging.warning('Skipping deobfuscation because no map file was provided.') + proguard_mappings = None + else: + proguard_mappings = _ParseMappingFile(mapping) device = DetermineDeviceToUse(device_arg) + host_tempfile = os.path.join(workdir, 'out.dump') device.EnableRoot() - with device_temp_file.DeviceTempFile( - device.adb) as file_on_device: - _AdbOatDumpForPackage(device, package, file_on_device.name) - file_on_host = os.path.join(workdir, 'out.dump') - device.PullFile(file_on_device.name, file_on_host, timeout=220) - proguard_mappings = (_ParseMappingFile(mapping) if mapping else None) - with open(file_on_host, 'r') as f: - java_classes = ListClassesAndVerificationStatus(f, proguard_mappings) - _PrintVerificationResults(status, java_classes, not hide_summary) + odex_files = FindOdexFiles(device, package) + java_classes = set() + for odex_file in odex_files: + _AdbOatDump(device, odex_file, host_tempfile) + with open(host_tempfile, 'r') as f: + java_classes.update(ParseOatdump(f, proguard_mappings)) + _PrintVerificationResults(status, sorted(java_classes), not hide_summary) def main(): @@ -271,8 +267,8 @@ def main(): RealMain(args.mapping, args.devices, args.package, args.status, args.hide_summary, args.workdir) # Assume the user wants the workdir to persist (useful for debugging). 
- logging.warn('Not cleaning up explicitly-specified workdir: %s', - args.workdir) + logging.warning('Not cleaning up explicitly-specified workdir: %s', + args.workdir) else: with tempfile_ext.NamedTemporaryDirectory() as workdir: RealMain(args.mapping, args.devices, args.package, args.status, diff --git a/build/android/list_class_verification_failures_test.py b/build/android/list_class_verification_failures_test.py old mode 100644 new mode 100755 index 4248064c9da7..149943664233 --- a/build/android/list_class_verification_failures_test.py +++ b/build/android/list_class_verification_failures_test.py @@ -1,4 +1,5 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -76,7 +77,7 @@ def testPathToDexForPlatformVersion_noPaths(self): device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) with self.assertRaises(list_verification.DeviceOSError) as cm: - list_verification.PathToDexForPlatformVersion(device, package_name) + list_verification.FindOdexFiles(device, package_name) message = str(cm.exception) self.assertIn('Could not find data directory', message) @@ -89,10 +90,11 @@ def testPathToDexForPlatformVersion_multiplePaths(self): device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch) device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) - with self.assertRaises(list_verification.DeviceOSError) as cm: - list_verification.PathToDexForPlatformVersion(device, package_name) - message = str(cm.exception) - self.assertIn('Expected exactly one path for', message) + odex_files = list_verification.FindOdexFiles(device, package_name) + self.assertEqual(odex_files, [ + '/data/dalvik-cache/arm64/data@app@first@base.apk@classes.dex', + '/data/dalvik-cache/arm64/data@app@second@base.apk@classes.dex' + ]) def testPathToDexForPlatformVersion_dalvikApiLevel(self): sdk_int = version_codes.KITKAT @@ -104,7 +106,7 @@ def testPathToDexForPlatformVersion_dalvikApiLevel(self): device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) with self.assertRaises(list_verification.UnsupportedDeviceError) as _: - list_verification.PathToDexForPlatformVersion(device, package_name) + list_verification.FindOdexFiles(device, package_name) def testPathToDexForPlatformVersion_lollipopArm(self): sdk_int = version_codes.LOLLIPOP @@ -116,11 +118,10 @@ def testPathToDexForPlatformVersion_lollipopArm(self): device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) device.FileExists = mock.MagicMock(return_value=True) - odex_file = list_verification.PathToDexForPlatformVersion(device, - package_name) - self.assertEqual(odex_file, - ('/data/dalvik-cache/arm/data@app' - '@package.name-1@base.apk@classes.dex')) + odex_files = list_verification.FindOdexFiles(device, package_name) + self.assertEqual( + odex_files, + ['/data/dalvik-cache/arm/data@app@package.name-1@base.apk@classes.dex']) def testPathToDexForPlatformVersion_mashmallowArm(self): sdk_int = version_codes.MARSHMALLOW @@ -132,10 +133,9 @@ def testPathToDexForPlatformVersion_mashmallowArm(self): device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) device.FileExists = mock.MagicMock(return_value=True) - odex_file = list_verification.PathToDexForPlatformVersion(device, - package_name) - self.assertEqual(odex_file, - '/some/path/package.name-1/oat/arm/base.odex') + odex_files = list_verification.FindOdexFiles(device, 
package_name) + self.assertEqual(odex_files, + ['/some/path/package.name-1/oat/arm/base.odex']) def testPathToDexForPlatformVersion_mashmallowArm64(self): sdk_int = version_codes.MARSHMALLOW @@ -147,10 +147,9 @@ def testPathToDexForPlatformVersion_mashmallowArm64(self): device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) device.FileExists = mock.MagicMock(return_value=True) - odex_file = list_verification.PathToDexForPlatformVersion(device, - package_name) - self.assertEqual(odex_file, - '/some/path/package.name-1/oat/arm64/base.odex') + odex_files = list_verification.FindOdexFiles(device, package_name) + self.assertEqual(odex_files, + ['/some/path/package.name-1/oat/arm64/base.odex']) def testPathToDexForPlatformVersion_pieNoOdexFile(self): sdk_int = version_codes.PIE @@ -163,7 +162,7 @@ def testPathToDexForPlatformVersion_pieNoOdexFile(self): device.FileExists = mock.MagicMock(return_value=False) with self.assertRaises(list_verification.DeviceOSError) as cm: - list_verification.PathToDexForPlatformVersion(device, package_name) + list_verification.FindOdexFiles(device, package_name) message = str(cm.exception) self.assertIn('you must run dex2oat on debuggable apps on >= P', message) @@ -178,7 +177,7 @@ def testPathToDexForPlatformVersion_lowerApiLevelNoOdexFile(self): device.FileExists = mock.MagicMock(return_value=False) with self.assertRaises(list_verification.DeviceOSError) as _: - list_verification.PathToDexForPlatformVersion(device, package_name) + list_verification.FindOdexFiles(device, package_name) def testListClasses_noProguardMap(self): oatdump_output = [ @@ -187,8 +186,7 @@ def testListClasses_noProguardMap(self): 'StatusRetryVerificationAtRuntime'), ] - classes = list_verification.ListClassesAndVerificationStatus(oatdump_output, - None) + classes = list_verification.ParseOatdump(oatdump_output, None) self.assertEqual(2, len(classes)) java_class_1 = _ClassForName('a.b.JavaClass1', classes) java_class_2 = _ClassForName('a.b.JavaClass2', classes) @@ -207,8 +205,7 @@ def testListClasses_proguardMap(self): 'a.b.ObfuscatedJavaClass1': 'a.b.JavaClass1', 'a.b.ObfuscatedJavaClass2': 'a.b.JavaClass2', } - classes = list_verification.ListClassesAndVerificationStatus(oatdump_output, - mapping) + classes = list_verification.ParseOatdump(oatdump_output, mapping) self.assertEqual(2, len(classes)) java_class_1 = _ClassForName('a.b.JavaClass1', classes) java_class_2 = _ClassForName('a.b.JavaClass2', classes) @@ -222,8 +219,7 @@ def testListClasses_noStatusPrefix(self): _CreateOdexLine('a.b.JavaClass2', 7, 'RetryVerificationAtRuntime'), ] - classes = list_verification.ListClassesAndVerificationStatus(oatdump_output, - None) + classes = list_verification.ParseOatdump(oatdump_output, None) self.assertEqual(2, len(classes)) java_class_1 = _ClassForName('a.b.JavaClass1', classes) java_class_2 = _ClassForName('a.b.JavaClass2', classes) diff --git a/build/android/list_java_targets.py b/build/android/list_java_targets.py index d0689a6e0c8a..b135b0fcab10 100755 --- a/build/android/list_java_targets.py +++ b/build/android/list_java_targets.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython3 -# Copyright 2020 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -28,6 +28,7 @@ import json import logging import os +import shutil import subprocess import sys @@ -48,25 +49,49 @@ 'java_annotation_processor', 'java_binary', 'java_library', - 'junit_binary', + 'robolectric_binary', 'system_java_library', ) -def _run_ninja(output_dir, args): - cmd = [ - 'autoninja', +def _resolve_ninja(): + # Prefer the version on PATH, but fallback to known version if PATH doesn't + # have one (e.g. on bots). + if shutil.which('ninja') is None: + return os.path.join(_SRC_ROOT, 'third_party', 'ninja', 'ninja') + return 'ninja' + + +def _resolve_autoninja(): + # Prefer the version on PATH, but fallback to known version if PATH doesn't + # have one (e.g. on bots). + if shutil.which('autoninja') is None: + return os.path.join(_SRC_ROOT, 'third_party', 'depot_tools', 'autoninja') + return 'autoninja' + + +def _run_ninja(output_dir, args, j_value=None, quiet=False): + if j_value: + cmd = [_resolve_ninja(), '-j', j_value] + else: + cmd = [_resolve_autoninja()] + cmd += [ '-C', output_dir, ] cmd.extend(args) logging.info('Running: %r', cmd) - subprocess.run(cmd, check=True, stdout=sys.stderr) + if quiet: + subprocess.run(cmd, check=True, capture_output=True) + else: + subprocess.run(cmd, check=True, stdout=sys.stderr) def _query_for_build_config_targets(output_dir): # Query ninja rather than GN since it's faster. - cmd = ['ninja', '-C', output_dir, '-t', 'targets'] + # Use ninja rather than autoninja to avoid extra output if user has set the + # NINJA_SUMMARIZE_BUILD environment variable. + cmd = [_resolve_ninja(), '-C', output_dir, '-t', 'targets'] logging.info('Running: %r', cmd) ninja_output = subprocess.run(cmd, check=True, @@ -83,7 +108,45 @@ def _query_for_build_config_targets(output_dir): return ret -class _TargetEntry(object): +def _query_json(*, json_dict: dict, query: str, path: str): + """Traverses through the json dictionary according to the query. + + If at any point a key does not exist, return the empty string, but raise an + error if a key exists but is the wrong type. + + This is roughly equivalent to returning + json_dict[queries[0]]?[queries[1]]?...[queries[N]]? where the ? means that if + the key doesn't exist, the empty string is returned. + + Example: + Given json_dict = {'a': {'b': 'c'}} + - If queries = ['a', 'b'] + Return: 'c' + - If queries = ['a', 'd'] + Return '' + - If queries = ['x'] + Return '' + - If queries = ['a', 'b', 'x'] + Raise an error since json_dict['a']['b'] is the string 'c' instead of an + expected dict that can be indexed into. + + Returns the final result after exhausting all the queries. + """ + queries = query.split('.') + value = json_dict + try: + for key in queries: + value = value.get(key) + if value is None: + return '' + except AttributeError as e: + raise Exception( + f'Failed when attempting to get {queries} from {path}') from e + return value + + +class _TargetEntry: + def __init__(self, gn_target): assert gn_target.startswith('//'), f'{gn_target} does not start with //' assert ':' in gn_target, f'Non-root {gn_target} required' @@ -100,23 +163,23 @@ def ninja_build_config_target(self): @property def build_config_path(self): - """Returns the filepath of the project's .build_config.""" + """Returns the filepath of the project's .build_config.json.""" ninja_target = self.ninja_target # Support targets at the root level. e.g. 
//:foo if ninja_target[0] == ':': ninja_target = ninja_target[1:] - subpath = ninja_target.replace(':', os.path.sep) + '.build_config' + subpath = ninja_target.replace(':', os.path.sep) + '.build_config.json' return os.path.join(constants.GetOutDirectory(), 'gen', subpath) def build_config(self): - """Reads and returns the project's .build_config JSON.""" + """Reads and returns the project's .build_config.json JSON.""" if not self._build_config: with open(self.build_config_path) as jsonfile: self._build_config = json.load(jsonfile) return self._build_config def get_type(self): - """Returns the target type from its .build_config.""" + """Returns the target type from its .build_config.json.""" return self.build_config()['deps_info']['type'] def proguard_enabled(self): @@ -145,12 +208,13 @@ def main(): parser.add_argument('--print-types', action='store_true', help='Print type of each target') - parser.add_argument('--print-build-config-paths', - action='store_true', - help='Print path to the .build_config of each target') + parser.add_argument( + '--print-build-config-paths', + action='store_true', + help='Print path to the .build_config.json of each target') parser.add_argument('--build', action='store_true', - help='Build all .build_config files.') + help='Build all .build_config.json files.') parser.add_argument('--type', action='append', help='Restrict to targets of given type', @@ -160,14 +224,22 @@ def main(): help='Print counts of each target type.') parser.add_argument('--proguard-enabled', action='store_true', - help='Restrict to targets that have proguard enabled') + help='Restrict to targets that have proguard enabled.') + parser.add_argument('--query', + help='A dot separated string specifying a query for a ' + 'build config json value of each target. 
Example: Use ' + '--query deps_info.unprocessed_jar_path to show a list ' + 'of all targets that have a non-empty deps_info dict and ' + 'non-empty "unprocessed_jar_path" value in that dict.') + parser.add_argument('-j', help='Use -j with ninja instead of autoninja.') parser.add_argument('-v', '--verbose', default=0, action='count') + parser.add_argument('-q', '--quiet', default=0, action='count') args = parser.parse_args() args.build |= bool(args.type or args.proguard_enabled or args.print_types - or args.stats) + or args.stats or args.query) - logging.basicConfig(level=logging.WARNING - (10 * args.verbose), + logging.basicConfig(level=logging.WARNING + 10 * (args.quiet - args.verbose), format='%(levelname).1s %(relativeCreated)6d %(message)s') if args.output_directory: @@ -180,8 +252,10 @@ def main(): entries = [_TargetEntry(t) for t in targets] if args.build: - logging.warning('Building %d .build_config files...', len(entries)) - _run_ninja(output_dir, [e.ninja_build_config_target for e in entries]) + logging.warning('Building %d .build_config.json files...', len(entries)) + _run_ninja(output_dir, [e.ninja_build_config_target for e in entries], + j_value=args.j, + quiet=args.quiet) if args.type: entries = [e for e in entries if e.get_type() in args.type] @@ -208,6 +282,13 @@ def main(): to_print = f'{to_print}: {e.get_type()}' elif args.print_build_config_paths: to_print = f'{to_print}: {e.build_config_path}' + elif args.query: + value = _query_json(json_dict=e.build_config(), + query=args.query, + path=e.build_config_path) + if not value: + continue + to_print = f'{to_print}: {value}' print(to_print) diff --git a/build/android/main_dex_classes.flags b/build/android/main_dex_classes.flags index 31dbdd619ea0..7e0475634e57 100644 --- a/build/android/main_dex_classes.flags +++ b/build/android/main_dex_classes.flags @@ -1,16 +1,16 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # Proguard flags for what should be kept in the main dex. Only used # during main dex list determination, not during actual proguarding. --keep @org.chromium.base.annotations.MainDex class * { +-keep @org.chromium.build.annotations.MainDex class * { *; } -keepclasseswithmembers class * { - @org.chromium.base.annotations.MainDex ; + @org.chromium.build.annotations.MainDex ; } # Assume all IDL-generated classes should be kept. They can't reference other @@ -29,11 +29,6 @@ *; } -# Used by tests for secondary dex extraction. --keep class android.support.v4.content.ContextCompat { - *; -} - # The following are based on $SDK_BUILD_TOOLS/mainDexClasses.rules # Ours differ in that: # 1. It omits -keeps for application / instrumentation / backupagents (these are diff --git a/build/android/method_count.py b/build/android/method_count.py index a39a390cf87f..8556b22c801a 100755 --- a/build/android/method_count.py +++ b/build/android/method_count.py @@ -1,9 +1,8 @@ -#! /usr/bin/env python -# Copyright 2015 The Chromium Authors. All rights reserved. +#! /usr/bin/env python3 +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
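The -v/-q handling added to list_java_targets.py above leans on the fact that Python's standard logging levels are spaced 10 apart, so each -v steps the threshold down one level from WARNING and each -q steps it up. A self-contained sketch of the arithmetic (the function name is illustrative):

    import logging

    def effective_level(verbose, quiet):
      # Mirrors: logging.WARNING + 10 * (args.quiet - args.verbose)
      return logging.WARNING + 10 * (quiet - verbose)

    assert effective_level(0, 0) == logging.WARNING
    assert effective_level(1, 0) == logging.INFO    # -v
    assert effective_level(2, 0) == logging.DEBUG   # -vv
    assert effective_level(0, 1) == logging.ERROR   # -q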
-from __future__ import print_function import argparse import os @@ -13,7 +12,7 @@ from pylib.dex import dex_parser -class DexStatsCollector(object): +class DexStatsCollector: """Tracks count of method/field/string/type as well as unique methods.""" def __init__(self): diff --git a/build/android/multidex.flags b/build/android/multidex.flags deleted file mode 100644 index e3543c132418..000000000000 --- a/build/android/multidex.flags +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# When multidex is enabled, need to keep the @MainDex annotation so that it -# can be used to create the main dex list. --keepattributes *Annotations* --keep @interface org.chromium.base.annotations.MainDex diff --git a/build/android/native_flags/BUILD.gn b/build/android/native_flags/BUILD.gn index 9c5be70ffd7a..317103026163 100644 --- a/build/android/native_flags/BUILD.gn +++ b/build/android/native_flags/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/native_flags/argcapture.py b/build/android/native_flags/argcapture.py index 159b03ab887c..b590fff207ef 100755 --- a/build/android/native_flags/argcapture.py +++ b/build/android/native_flags/argcapture.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2021 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Writes arguments to a file.""" diff --git a/build/android/native_flags/empty.cc b/build/android/native_flags/empty.cc index 94aac140fbd3..29dfc78a94ac 100644 --- a/build/android/native_flags/empty.cc +++ b/build/android/native_flags/empty.cc @@ -1,4 +1,4 @@ -// Copyright 2021 The Chromium Authors. All rights reserved. +// Copyright 2021 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/print_cipd_version.py b/build/android/print_cipd_version.py new file mode 100755 index 000000000000..581295dcdbac --- /dev/null +++ b/build/android/print_cipd_version.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import pathlib +import re +import subprocess + +_DIR_SOURCE_ROOT = str(pathlib.Path(__file__).absolute().parents[2]) + + +def main(): + parser = argparse.ArgumentParser() + # Hide args set by wrappers so that using --help with the wrappers does not + # show them. 
+ parser.add_argument('--subdir', required=True, help=argparse.SUPPRESS) + parser.add_argument('--cipd-package', required=True, help=argparse.SUPPRESS) + parser.add_argument('--git-log-url', help=argparse.SUPPRESS) + parser.add_argument('--cipd-instance', help='Uses value from DEPS by default') + args = parser.parse_args() + + if not args.cipd_instance: + args.cipd_instance = subprocess.check_output( + ['gclient', 'getdep', '-r', f'src/{args.subdir}:{args.cipd_package}'], + cwd=_DIR_SOURCE_ROOT, + text=True) + + cmd = ['cipd', 'describe', args.cipd_package, '-version', args.cipd_instance] + print(' '.join(cmd)) + output = subprocess.check_output(cmd, text=True) + print(output, end='') + if args.git_log_url: + git_hashes = re.findall(r'version:.*?@(\w+)', output) + if not git_hashes: + print('Could not find git hash from output.') + else: + # Multiple version tags exist when multiple versions have the same sha1. + last_version = git_hashes[-1] + print() + print('Recent commits:', args.git_log_url.format(last_version)) + + +if __name__ == '__main__': + main() diff --git a/build/android/provision_devices.py b/build/android/provision_devices.py index 5fb4d93d4900..428d9b3d5083 100755 --- a/build/android/provision_devices.py +++ b/build/android/provision_devices.py @@ -1,6 +1,6 @@ -#!/usr/bin/env vpython +#!/usr/bin/env vpython3 # -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -46,7 +46,7 @@ _TOMBSTONE_REGEX = re.compile('tombstone.*') -class _DEFAULT_TIMEOUTS(object): +class _DEFAULT_TIMEOUTS: # L can take a while to reboot after a wipe. LOLLIPOP = 600 PRE_LOLLIPOP = 180 @@ -54,7 +54,7 @@ class _DEFAULT_TIMEOUTS(object): HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP) -class _PHASES(object): +class _PHASES: WIPE = 'wipe' PROPERTIES = 'properties' FINISH = 'finish' @@ -67,7 +67,7 @@ def ProvisionDevices(args): if args.denylist_file else None) devices = [ d for d in device_utils.DeviceUtils.HealthyDevices(denylist) - if not args.emulators or d.adb.is_emulator + if not args.emulators or d.is_emulator ] if args.device: devices = [d for d in devices if d == args.device] @@ -394,14 +394,13 @@ def _set_and_verify_date(): get_date_command, as_root=True, single_line=True).replace('"', '') device_time = datetime.datetime.strptime(device_time, "%Y%m%d.%H%M%S") correct_time = datetime.datetime.strptime(strgmtime, date_format) - tdelta = (correct_time - device_time).seconds + tdelta = abs(correct_time - device_time).seconds if tdelta <= 1: logging.info('Date/time successfully set on %s', device) return True - else: - logging.error('Date mismatch. Device: %s Correct: %s', - device_time.isoformat(), correct_time.isoformat()) - return False + logging.error('Date mismatch. Device: %s Correct: %s', + device_time.isoformat(), correct_time.isoformat()) + return False # Sometimes the date is not set correctly on the devices. Retry on failure. if device.IsUserBuild(): diff --git a/build/android/pylib/__init__.py b/build/android/pylib/__init__.py index 0841981bea9c..ea260d44a639 100644 --- a/build/android/pylib/__init__.py +++ b/build/android/pylib/__init__.py @@ -1,14 +1,15 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
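The abs() introduced in provision_devices' _set_and_verify_date above matters because datetime.timedelta normalizes negative differences to a negative day count plus a large positive seconds component; without it, a device clock even two seconds ahead of the correct time produced a tdelta near 86400 and the <= 1 tolerance check always failed. A standalone illustration of the pitfall:

    import datetime

    correct = datetime.datetime(2023, 1, 1)
    device = correct + datetime.timedelta(seconds=2)  # device clock 2s ahead

    print((correct - device).seconds)     # 86398, i.e. timedelta(days=-1, seconds=86398)
    print(abs(correct - device).seconds)  # 2, what the tolerance check intends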
-from __future__ import absolute_import import os import sys -_THIRD_PARTY_PATH = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..', '..', '..', 'third_party')) +_SRC_PATH = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..', '..')) + +_THIRD_PARTY_PATH = os.path.join(_SRC_PATH, 'third_party') _CATAPULT_PATH = os.path.join(_THIRD_PARTY_PATH, 'catapult') @@ -22,6 +23,7 @@ _TRACE2HTML_PATH = os.path.join(_CATAPULT_PATH, 'tracing') +_BUILD_UTIL_PATH = os.path.join(_SRC_PATH, 'build', 'util') if _DEVIL_PATH not in sys.path: sys.path.append(_DEVIL_PATH) @@ -37,3 +39,6 @@ if _SIX_PATH not in sys.path: sys.path.append(_SIX_PATH) + +if _BUILD_UTIL_PATH not in sys.path: + sys.path.insert(0, _BUILD_UTIL_PATH) diff --git a/build/android/pylib/android/__init__.py b/build/android/pylib/android/__init__.py index a67c3501b20a..68130d5941d9 100644 --- a/build/android/pylib/android/__init__.py +++ b/build/android/pylib/android/__init__.py @@ -1,3 +1,3 @@ -# Copyright (c) 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/android/logcat_symbolizer.py b/build/android/pylib/android/logcat_symbolizer.py index 78e18f55cfc1..01d930aea039 100644 --- a/build/android/pylib/android/logcat_symbolizer.py +++ b/build/android/pylib/android/logcat_symbolizer.py @@ -1,8 +1,7 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import import re from devil.android import logcat_monitor diff --git a/build/android/pylib/base/__init__.py b/build/android/pylib/base/__init__.py index 96196cffb272..5ffa28413724 100644 --- a/build/android/pylib/base/__init__.py +++ b/build/android/pylib/base/__init__.py @@ -1,3 +1,3 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/base/base_test_result.py b/build/android/pylib/base/base_test_result.py index 03f00f253e04..e5fbab54f0d3 100644 --- a/build/android/pylib/base/base_test_result.py +++ b/build/android/pylib/base/base_test_result.py @@ -1,36 +1,32 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Module containing base test results classes.""" -from __future__ import absolute_import -import threading -import six - -class ResultType(object): - """Class enumerating test types.""" - # The test passed. - PASS = 'SUCCESS' - - # The test was intentionally skipped. - SKIP = 'SKIPPED' +import functools +import sys +import threading - # The test failed. - FAIL = 'FAILURE' +from lib.results import result_types # pylint: disable=import-error - # The test caused the containing process to crash. - CRASH = 'CRASH' +# This must match the source adding the suffix: bit.ly/3Zmwwyx +_MULTIPROCESS_SUFFIX = '__multiprocess_mode' - # The test timed out. - TIMEOUT = 'TIMEOUT' - # The test ran, but we couldn't determine what happened. - UNKNOWN = 'UNKNOWN' +class ResultType: + """Class enumerating test types. - # The test did not run. 
- NOTRUN = 'NOTRUN' + Wraps the results defined in //build/util/lib/results/. + """ + PASS = result_types.PASS + SKIP = result_types.SKIP + FAIL = result_types.FAIL + CRASH = result_types.CRASH + TIMEOUT = result_types.TIMEOUT + UNKNOWN = result_types.UNKNOWN + NOTRUN = result_types.NOTRUN @@ -40,10 +36,11 @@ def GetTypes(): ResultType.NOTRUN] -class BaseTestResult(object): +@functools.total_ordering +class BaseTestResult: """Base class for a single test result.""" - def __init__(self, name, test_type, duration=0, log=''): + def __init__(self, name, test_type, duration=0, log='', failure_reason=None): """Construct a BaseTestResult. Args: @@ -58,7 +55,9 @@ def __init__(self, name, test_type, duration=0, log=''): self._test_type = test_type self._duration = duration self._log = log + self._failure_reason = failure_reason self._links = {} + self._webview_multiprocess_mode = name.endswith(_MULTIPROCESS_SUFFIX) def __str__(self): return self._name @@ -66,9 +65,11 @@ def __str__(self): def __repr__(self): return self._name - def __cmp__(self, other): - # pylint: disable=W0212 - return cmp(self._name, other._name) + def __eq__(self, other): + return self.GetName() == other.GetName() + + def __lt__(self, other): + return self.GetName() < other.GetName() def __hash__(self): return hash(self._name) @@ -85,6 +86,16 @@ def GetName(self): """Get the test name.""" return self._name + def GetNameForResultSink(self): + """Get the test name to be reported to resultsink.""" + raw_name = self.GetName() + if self._webview_multiprocess_mode: + assert raw_name.endswith( + _MULTIPROCESS_SUFFIX + ), 'multiprocess mode test raw name should have the corresponding suffix' + return raw_name[:-len(_MULTIPROCESS_SUFFIX)] + return raw_name + def SetType(self, test_type): """Set the test result type.""" assert test_type in ResultType.GetTypes() @@ -106,6 +117,22 @@ def GetLog(self): """Get the test log.""" return self._log + def SetFailureReason(self, failure_reason): + """Set the reason the test failed. + + This should be the first failure the test encounters and exclude any stack + trace. + """ + self._failure_reason = failure_reason + + def GetFailureReason(self): + """Get the reason the test failed. + + Returns None if the test did not fail or if the reason the test failed is + unknown.
+ """ + return self._failure_reason + def SetLink(self, name, link_url): """Set link with test result data.""" self._links[name] = link_url @@ -114,8 +141,14 @@ def GetLinks(self): """Get dict containing links to test result data.""" return self._links + def GetVariantForResultSink(self): + """Get the variant dict to be reported to result sink.""" + if self._webview_multiprocess_mode: + return {'webview_multiprocess_mode': 'Yes'} + return None + -class TestRunResults(object): +class TestRunResults: """Set of results for a test run.""" def __init__(self): @@ -141,7 +174,10 @@ def GetLogs(self): log = t.GetLog() if log: s.append('[%s] %s:' % (test_type, t)) - s.append(six.text_type(log, 'utf-8')) + s.append(log) + if sys.version_info.major == 2: + decoded = [u.decode(encoding='utf-8', errors='ignore') for u in s] + return '\n'.join(decoded) return '\n'.join(s) def GetGtestForm(self): diff --git a/build/android/pylib/base/base_test_result_unittest.py b/build/android/pylib/base/base_test_result_unittest.py index 31a1f6000651..955a59f3a9fe 100644 --- a/build/android/pylib/base/base_test_result_unittest.py +++ b/build/android/pylib/base/base_test_result_unittest.py @@ -1,10 +1,10 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Unittests for TestRunResults.""" -from __future__ import absolute_import + import unittest from pylib.base.base_test_result import BaseTestResult diff --git a/build/android/pylib/base/environment.py b/build/android/pylib/base/environment.py index 744c392c1bc1..0c4326a0b395 100644 --- a/build/android/pylib/base/environment.py +++ b/build/android/pylib/base/environment.py @@ -1,8 +1,11 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +# TODO(1262303): After Telemetry is supported by python3 we can remove +# object inheritance from this script. +# pylint: disable=useless-object-inheritance class Environment(object): """An environment in which tests can be run. diff --git a/build/android/pylib/base/environment_factory.py b/build/android/pylib/base/environment_factory.py index 2ff93f340d85..377e0f7081d2 100644 --- a/build/android/pylib/base/environment_factory.py +++ b/build/android/pylib/base/environment_factory.py @@ -1,8 +1,8 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import absolute_import + from pylib import constants from pylib.local.device import local_device_environment from pylib.local.machine import local_machine_environment @@ -23,12 +23,13 @@ def CreateEnvironment(args, output_manager, error_func): if args.avd_config: if not local_emulator_environment: error_func('emulator environment requested but not available.') + raise RuntimeError('error_func must call exit inside.') return local_emulator_environment.LocalEmulatorEnvironment( args, output_manager, error_func) return local_device_environment.LocalDeviceEnvironment( args, output_manager, error_func) - else: - return local_machine_environment.LocalMachineEnvironment( - args, output_manager, error_func) + return local_machine_environment.LocalMachineEnvironment( + args, output_manager, error_func) error_func('Unable to create %s environment.' % args.environment) + raise RuntimeError('error_func must call exit inside.') diff --git a/build/android/pylib/base/mock_environment.py b/build/android/pylib/base/mock_environment.py index d7293c7bc744..c537f05b3e3b 100644 --- a/build/android/pylib/base/mock_environment.py +++ b/build/android/pylib/base/mock_environment.py @@ -1,8 +1,8 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import + from pylib.base import environment import mock # pylint: disable=import-error diff --git a/build/android/pylib/base/mock_test_instance.py b/build/android/pylib/base/mock_test_instance.py index 19a1d7e9f799..547a84b569f0 100644 --- a/build/android/pylib/base/mock_test_instance.py +++ b/build/android/pylib/base/mock_test_instance.py @@ -1,8 +1,8 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import + from pylib.base import test_instance import mock # pylint: disable=import-error diff --git a/build/android/pylib/base/output_manager.py b/build/android/pylib/base/output_manager.py index 53e5aea6f4ba..f562be85f55c 100644 --- a/build/android/pylib/base/output_manager.py +++ b/build/android/pylib/base/output_manager.py @@ -1,8 +1,8 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import + import contextlib import logging import os @@ -11,14 +11,14 @@ from devil.utils import reraiser_thread -class Datatype(object): +class Datatype: HTML = 'text/html' JSON = 'application/json' PNG = 'image/png' TEXT = 'text/plain' -class OutputManager(object): +class OutputManager: def __init__(self): """OutputManager Constructor. 
@@ -51,26 +51,38 @@ def ArchivedTempfile( if not self._allow_upload: raise Exception('Must run |SetUp| before attempting to upload!') - f = self._CreateArchivedFile(out_filename, out_subdir, datatype) + f = self.CreateArchivedFile(out_filename, out_subdir, datatype) try: yield f finally: - f.PrepareArchive() - - def archive(): - try: - f.Archive() - finally: - f.Delete() + self.ArchiveArchivedFile(f, delete=True) - thread = reraiser_thread.ReraiserThread(func=archive) - thread.start() - self._thread_group.Add(thread) + def CreateArchivedFile(self, out_filename, out_subdir, + datatype=Datatype.TEXT): + """Returns an instance of ArchivedFile.""" + return self._CreateArchivedFile(out_filename, out_subdir, datatype) def _CreateArchivedFile(self, out_filename, out_subdir, datatype): - """Returns an instance of ArchivedFile.""" raise NotImplementedError + def ArchiveArchivedFile(self, archived_file, delete=False): + """Archive an ArchivedFile instance and optionally delete it.""" + if not isinstance(archived_file, ArchivedFile): + raise Exception('Expecting an instance of ArchivedFile, got %s.' % + type(archived_file)) + archived_file.PrepareArchive() + + def archive(): + try: + archived_file.Archive() + finally: + if delete: + archived_file.Delete() + + thread = reraiser_thread.ReraiserThread(func=archive) + thread.start() + self._thread_group.Add(thread) + def SetUp(self): self._allow_upload = True self._thread_group = reraiser_thread.ReraiserThreadGroup() @@ -88,20 +100,29 @@ def __exit__(self, _exc_type, _exc_val, _exc_tb): self.TearDown() -class ArchivedFile(object): +class ArchivedFile: def __init__(self, out_filename, out_subdir, datatype): self._out_filename = out_filename self._out_subdir = out_subdir self._datatype = datatype - self._f = tempfile.NamedTemporaryFile(delete=False) + mode = 'w+' + if datatype == Datatype.PNG: + mode = 'w+b' + self._f = tempfile.NamedTemporaryFile(mode=mode, delete=False) self._ready_to_archive = False @property def name(self): return self._f.name + def fileno(self, *args, **kwargs): + if self._ready_to_archive: + raise Exception('Cannot retrieve the integer file descriptor ' + 'after archiving has begun!') + return self._f.fileno(*args, **kwargs) + def write(self, *args, **kwargs): if self._ready_to_archive: raise Exception('Cannot write to file after archiving has begun!') @@ -141,7 +162,6 @@ def _PrepareArchive(self): content addressed files. This is called after the file is written but before archiving has begun. """ - pass def Archive(self): """Archives file.""" diff --git a/build/android/pylib/base/output_manager_factory.py b/build/android/pylib/base/output_manager_factory.py index 891692d950a5..378a89a2e05d 100644 --- a/build/android/pylib/base/output_manager_factory.py +++ b/build/android/pylib/base/output_manager_factory.py @@ -1,8 +1,8 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file.
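The OutputManager changes above split the old context-manager-only flow into explicit steps, so a caller can create an archived file, keep it open across arbitrary code, and archive (and optionally delete) it later. A usage sketch against the new public methods; the concrete manager and file names are illustrative:

    om = local_output_manager.LocalOutputManager(output_dir='out/logs')  # illustrative
    om.SetUp()
    f = om.CreateArchivedFile('logcat.txt', 'logs', Datatype.TEXT)
    f.write('some log text\n')
    om.ArchiveArchivedFile(f, delete=True)  # PrepareArchive, then Archive/Delete on a thread
    om.TearDown()  # waits for the background archiving threads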
-from __future__ import absolute_import + from pylib import constants from pylib.output import local_output_manager from pylib.output import remote_output_manager @@ -13,6 +13,5 @@ def CreateOutputManager(args): if args.local_output or not local_utils.IsOnSwarming(): return local_output_manager.LocalOutputManager( output_dir=constants.GetOutDirectory()) - else: - return remote_output_manager.RemoteOutputManager( - bucket=args.gs_results_bucket) + return remote_output_manager.RemoteOutputManager( + bucket=args.gs_results_bucket) diff --git a/build/android/pylib/base/output_manager_test_case.py b/build/android/pylib/base/output_manager_test_case.py index 7b7e462f177e..7349fd171517 100644 --- a/build/android/pylib/base/output_manager_test_case.py +++ b/build/android/pylib/base/output_manager_test_case.py @@ -1,8 +1,8 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import + import os.path import unittest diff --git a/build/android/pylib/base/test_collection.py b/build/android/pylib/base/test_collection.py index 83b3bf89be27..3b9fec047e53 100644 --- a/build/android/pylib/base/test_collection.py +++ b/build/android/pylib/base/test_collection.py @@ -1,11 +1,12 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import + import threading -class TestCollection(object): + +class TestCollection: """A threadsafe collection of tests. Args: diff --git a/build/android/pylib/base/test_exception.py b/build/android/pylib/base/test_exception.py index c98d2cb73eb8..6dd31cdf8bc8 100644 --- a/build/android/pylib/base/test_exception.py +++ b/build/android/pylib/base/test_exception.py @@ -1,8 +1,7 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. class TestException(Exception): """Base class for exceptions thrown by the test runner.""" - pass diff --git a/build/android/pylib/base/test_instance.py b/build/android/pylib/base/test_instance.py index 7b1099cffa8a..9a4e922ea66b 100644 --- a/build/android/pylib/base/test_instance.py +++ b/build/android/pylib/base/test_instance.py @@ -1,9 +1,9 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -class TestInstance(object): +class TestInstance: """A type of test. This is expected to handle all logic that is test-type specific but diff --git a/build/android/pylib/base/test_instance_factory.py b/build/android/pylib/base/test_instance_factory.py index f47242a60303..3b129742271a 100644 --- a/build/android/pylib/base/test_instance_factory.py +++ b/build/android/pylib/base/test_instance_factory.py @@ -1,8 +1,8 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import absolute_import + from pylib.gtest import gtest_test_instance from pylib.instrumentation import instrumentation_test_instance from pylib.junit import junit_test_instance @@ -15,12 +15,13 @@ def CreateTestInstance(args, error_func): if args.command == 'gtest': return gtest_test_instance.GtestTestInstance( args, device_dependencies.GetDataDependencies, error_func) - elif args.command == 'instrumentation': + if args.command == 'instrumentation': return instrumentation_test_instance.InstrumentationTestInstance( args, device_dependencies.GetDataDependencies, error_func) - elif args.command == 'junit': + if args.command == 'junit': return junit_test_instance.JunitTestInstance(args, error_func) - elif args.command == 'monkey': + if args.command == 'monkey': return monkey_test_instance.MonkeyTestInstance(args, error_func) error_func('Unable to create %s test instance.' % args.command) + raise RuntimeError('error_func must call exit inside.') diff --git a/build/android/pylib/base/test_run.py b/build/android/pylib/base/test_run.py index fc72d3a5476a..36aca96c6d88 100644 --- a/build/android/pylib/base/test_run.py +++ b/build/android/pylib/base/test_run.py @@ -1,9 +1,9 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -class TestRun(object): +class TestRun: """An execution of a particular test on a particular device. This is expected to handle all logic that is specific to the combination of @@ -27,15 +27,20 @@ def TestPackage(self): def SetUp(self): raise NotImplementedError - def RunTests(self, results): + def RunTests(self, results, raw_logs_fh=None): """Runs Tests and populates |results|. Args: results: An array that should be populated with |base_test_result.TestRunResults| objects. + raw_logs_fh: An optional file handle to write raw logs to. """ raise NotImplementedError + def GetTestsForListing(self): + """Returns a list of test names.""" + raise NotImplementedError + def TearDown(self): raise NotImplementedError diff --git a/build/android/pylib/base/test_run_factory.py b/build/android/pylib/base/test_run_factory.py index 35d5494d03b4..5806a4fe934c 100644 --- a/build/android/pylib/base/test_run_factory.py +++ b/build/android/pylib/base/test_run_factory.py @@ -1,8 +1,8 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import + from pylib.gtest import gtest_test_instance from pylib.instrumentation import instrumentation_test_instance from pylib.junit import junit_test_instance @@ -34,3 +34,4 @@ def CreateTestRun(env, test_instance, error_func): error_func('Unable to create test run for %s tests in %s environment' % (str(test_instance), str(env))) + raise RuntimeError('error_func must call exit inside.') diff --git a/build/android/pylib/base/test_server.py b/build/android/pylib/base/test_server.py index 763e1212c368..d1fda4b7fb4b 100644 --- a/build/android/pylib/base/test_server.py +++ b/build/android/pylib/base/test_server.py @@ -1,8 +1,9 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
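The raise RuntimeError('error_func must call exit inside.') lines added to these factories document a contract rather than real control flow: error_func is expected never to return (in the test runner it is typically argparse's parser.error, which prints the message and raises SystemExit), and the unreachable raise makes that explicit to readers and to linters checking for consistent returns. A sketch of the assumed contract:

    import argparse

    parser = argparse.ArgumentParser()

    def error_func(msg):
      parser.error(msg)  # prints the message and raises SystemExit(2)

    # Any code after a call to error_func() is therefore unreachable; the
    # trailing RuntimeError in the factories asserts exactly that.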
-class TestServer(object): + +class TestServer: """Base class for any server that needs to be set up for the tests.""" def __init__(self, *args, **kwargs): diff --git a/build/android/pylib/constants/__init__.py b/build/android/pylib/constants/__init__.py index 2d1be260fb03..cf57d9fe91bb 100644 --- a/build/android/pylib/constants/__init__.py +++ b/build/android/pylib/constants/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,12 +8,10 @@ # pylint: disable=W0212 -from __future__ import absolute_import -import collections + import glob import logging import os -import subprocess import devil.android.sdk.keyevent from devil.android.constants import chrome @@ -27,6 +25,7 @@ DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT', os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, os.pardir))) +JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current') PACKAGE_INFO = dict(chrome.PACKAGE_INFO) PACKAGE_INFO.update({ @@ -72,9 +71,9 @@ chrome.PackageInfo('com.google.android.webview', 'com.android.cts.webkit.WebViewStartupCtsActivity', 'webview-command-line', None), - 'android_system_webview_shell': - chrome.PackageInfo('org.chromium.webview_shell', - 'org.chromium.webview_shell.WebViewBrowserActivity', + 'android_google_webview_cts_debug': + chrome.PackageInfo('com.google.android.webview.debug', + 'com.android.cts.webkit.WebViewStartupCtsActivity', 'webview-command-line', None), 'android_webview_ui_test': chrome.PackageInfo('org.chromium.webview_ui_test', @@ -115,7 +114,7 @@ SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots') -ANDROID_SDK_BUILD_TOOLS_VERSION = '30.0.1' +ANDROID_SDK_BUILD_TOOLS_VERSION = '33.0.0' ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'android_sdk', 'public') ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT, @@ -151,13 +150,13 @@ 'devil.android.md5sum_test', 'devil.utils.cmd_helper_test', 'pylib.results.json_results_test', - 'pylib.utils.proguard_test', ] }, 'gyp_py_unittests': { 'path': os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'), 'test_modules': [ + 'create_unwind_table_tests', 'java_cpp_enum_tests', 'java_cpp_strings_tests', 'java_google_api_keys_tests', @@ -202,7 +201,7 @@ def SetOutputDirectory(output_directory): CheckOutputDirectory(). Typically by providing an --output-dir or --chromium-output-dir option. """ - os.environ['CHROMIUM_OUTPUT_DIR'] = output_directory + os.environ['CHROMIUM_OUTPUT_DIR'] = os.path.abspath(output_directory) # The message that is printed when the Chromium output directory cannot diff --git a/build/android/pylib/constants/host_paths.py b/build/android/pylib/constants/host_paths.py index a38d28e27ea8..4b712649e763 100644 --- a/build/android/pylib/constants/host_paths.py +++ b/build/android/pylib/constants/host_paths.py @@ -1,8 +1,8 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import absolute_import + import contextlib import os import sys diff --git a/build/android/pylib/constants/host_paths_unittest.py b/build/android/pylib/constants/host_paths_unittest.py index 72be4edac441..3ce406f758dd 100755 --- a/build/android/pylib/constants/host_paths_unittest.py +++ b/build/android/pylib/constants/host_paths_unittest.py @@ -1,9 +1,9 @@ -#!/usr/bin/env python -# Copyright 2018 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import + import logging import os import unittest diff --git a/build/android/pylib/content_settings.py b/build/android/pylib/content_settings.py index 5ea7c525ed2e..ddd663f06b4e 100644 --- a/build/android/pylib/content_settings.py +++ b/build/android/pylib/content_settings.py @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -11,7 +11,7 @@ class ContentSettings(dict): """ def __init__(self, table, device): - super(ContentSettings, self).__init__() + super().__init__() self._table = table self._device = device diff --git a/build/android/pylib/device/commands/BUILD.gn b/build/android/pylib/device/commands/BUILD.gn index 13b69f618cfa..2f0273487ec1 100644 --- a/build/android/pylib/device/commands/BUILD.gn +++ b/build/android/pylib/device/commands/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java b/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java index cf0ff67af261..b322e32c3863 100644 --- a/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java +++ b/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java @@ -1,4 +1,4 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. +// Copyright 2014 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/pylib/device_settings.py b/build/android/pylib/device_settings.py index 1fa0903fc94d..83d933283633 100644 --- a/build/android/pylib/device_settings.py +++ b/build/android/pylib/device_settings.py @@ -1,8 +1,7 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import import logging import six diff --git a/build/android/pylib/dex/__init__.py b/build/android/pylib/dex/__init__.py index 4a12e35c9256..401c54b0d9c6 100644 --- a/build/android/pylib/dex/__init__.py +++ b/build/android/pylib/dex/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
diff --git a/build/android/pylib/dex/dex_parser.py b/build/android/pylib/dex/dex_parser.py index 3f2ed6f6331b..90029177e55d 100755 --- a/build/android/pylib/dex/dex_parser.py +++ b/build/android/pylib/dex/dex_parser.py @@ -1,8 +1,7 @@ -#!/usr/bin/env python -# Copyright 2019 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. - """Utilities for optimistically parsing dex files. This file is not meant to provide a generic tool for analyzing dex files. @@ -10,8 +9,6 @@ is provided, but it does not include error handling or validation. """ -from __future__ import print_function - import argparse import collections import errno @@ -65,7 +62,7 @@ 'annotations_off,class_data_off,static_values_off') -class _MemoryItemList(object): +class _MemoryItemList: """Base class for repeated memory items.""" def __init__(self, @@ -91,7 +88,7 @@ def __init__(self, self.offset = offset self.size = size reader.Seek(first_item_offset or offset) - self._items = [factory(reader) for _ in xrange(size)] + self._items = [factory(reader) for _ in range(size)] if alignment: reader.AlignUpTo(alignment) @@ -116,43 +113,38 @@ def __repr__(self): class _TypeIdItemList(_MemoryItemList): - def __init__(self, reader, offset, size): factory = lambda x: _TypeIdItem(x.ReadUInt()) - super(_TypeIdItemList, self).__init__(reader, offset, size, factory) + super().__init__(reader, offset, size, factory) class _ProtoIdItemList(_MemoryItemList): - def __init__(self, reader, offset, size): factory = lambda x: _ProtoIdItem(x.ReadUInt(), x.ReadUInt(), x.ReadUInt()) - super(_ProtoIdItemList, self).__init__(reader, offset, size, factory) + super().__init__(reader, offset, size, factory) class _MethodIdItemList(_MemoryItemList): - def __init__(self, reader, offset, size): factory = ( lambda x: _MethodIdItem(x.ReadUShort(), x.ReadUShort(), x.ReadUInt())) - super(_MethodIdItemList, self).__init__(reader, offset, size, factory) + super().__init__(reader, offset, size, factory) class _StringItemList(_MemoryItemList): - def __init__(self, reader, offset, size): reader.Seek(offset) - string_item_offsets = iter([reader.ReadUInt() for _ in xrange(size)]) + string_item_offsets = iter([reader.ReadUInt() for _ in range(size)]) def factory(x): data_offset = next(string_item_offsets) string = x.ReadString(data_offset) return _StringDataItem(len(string), string) - super(_StringItemList, self).__init__(reader, offset, size, factory) + super().__init__(reader, offset, size, factory) class _TypeListItem(_MemoryItemList): - def __init__(self, reader): offset = reader.Tell() size = reader.ReadUInt() @@ -160,35 +152,31 @@ def __init__(self, reader): # This is necessary because we need to extract the size of the type list # (in other cases the list size is provided in the header). 
first_item_offset = reader.Tell() - super(_TypeListItem, self).__init__( - reader, - offset, - size, - factory, - alignment=4, - first_item_offset=first_item_offset) + super().__init__(reader, + offset, + size, + factory, + alignment=4, + first_item_offset=first_item_offset) class _TypeListItemList(_MemoryItemList): - def __init__(self, reader, offset, size): - super(_TypeListItemList, self).__init__(reader, offset, size, _TypeListItem) + super().__init__(reader, offset, size, _TypeListItem) class _ClassDefItemList(_MemoryItemList): - def __init__(self, reader, offset, size): reader.Seek(offset) def factory(x): return _ClassDefItem(*(x.ReadUInt() - for _ in xrange(len(_ClassDefItem._fields)))) + for _ in range(len(_ClassDefItem._fields)))) - super(_ClassDefItemList, self).__init__(reader, offset, size, factory) + super().__init__(reader, offset, size, factory) -class _DexMapItem(object): - +class _DexMapItem: def __init__(self, reader): self.type = reader.ReadUShort() reader.ReadUShort() @@ -200,7 +188,7 @@ def __repr__(self): self.type, self.size, self.offset) -class _DexMapList(object): +class _DexMapList: # Full list of type codes: # https://source.android.com/devices/tech/dalvik/dex-format#type-codes TYPE_TYPE_LIST = 0x1001 @@ -209,7 +197,7 @@ def __init__(self, reader, offset): self._map = {} reader.Seek(offset) self._size = reader.ReadUInt() - for _ in xrange(self._size): + for _ in range(self._size): item = _DexMapItem(reader) self._map[item.type] = item @@ -223,8 +211,7 @@ def __repr__(self): return '_DexMapList(size={}, items={})'.format(self._size, self._map) -class _DexReader(object): - +class _DexReader: def __init__(self, data): self._data = data self._pos = 0 @@ -299,7 +286,7 @@ def _DecodeMUtf8(self, string_length, offset): self.Seek(offset) ret = '' - for _ in xrange(string_length): + for _ in range(string_length): a = self.ReadUByte() if a == 0: raise _MUTf8DecodeError('Early string termination encountered', @@ -319,8 +306,7 @@ def _DecodeMUtf8(self, string_length, offset): code = ((a & 0x0f) << 12) | ((b & 0x3f) << 6) | (c & 0x3f) else: raise _MUTf8DecodeError('Bad byte', string_length, offset) - - ret += unichr(code) + ret += chr(code) if self.ReadUByte() != 0x00: raise _MUTf8DecodeError('Expected string termination', string_length, @@ -330,14 +316,13 @@ def _DecodeMUtf8(self, string_length, offset): class _MUTf8DecodeError(Exception): - def __init__(self, message, length, offset): message += ' (decoded string length: {}, string data offset: {:#x})'.format( length, offset) - super(_MUTf8DecodeError, self).__init__(message) + super().__init__(message) -class DexFile(object): +class DexFile: """Represents a single dex file. 
Parses and exposes access to dex file structure and contents, as described @@ -380,20 +365,25 @@ def __init__(self, data): self.map_list = _DexMapList(self.reader, self.header.map_off) self.type_item_list = _TypeIdItemList(self.reader, self.header.type_ids_off, self.header.type_ids_size) - self.proto_item_list = _ProtoIdItemList( - self.reader, self.header.proto_ids_off, self.header.proto_ids_size) - self.method_item_list = _MethodIdItemList( - self.reader, self.header.method_ids_off, self.header.method_ids_size) - self.string_item_list = _StringItemList( - self.reader, self.header.string_ids_off, self.header.string_ids_size) - self.class_def_item_list = _ClassDefItemList( - self.reader, self.header.class_defs_off, self.header.class_defs_size) + self.proto_item_list = _ProtoIdItemList(self.reader, + self.header.proto_ids_off, + self.header.proto_ids_size) + self.method_item_list = _MethodIdItemList(self.reader, + self.header.method_ids_off, + self.header.method_ids_size) + self.string_item_list = _StringItemList(self.reader, + self.header.string_ids_off, + self.header.string_ids_size) + self.class_def_item_list = _ClassDefItemList(self.reader, + self.header.class_defs_off, + self.header.class_defs_size) type_list_key = _DexMapList.TYPE_TYPE_LIST if type_list_key in self.map_list: map_list_item = self.map_list[type_list_key] - self.type_list_item_list = _TypeListItemList( - self.reader, map_list_item.offset, map_list_item.size) + self.type_list_item_list = _TypeListItemList(self.reader, + map_list_item.offset, + map_list_item.size) else: self.type_list_item_list = _TypeListItemList(self.reader, 0, 0) self._type_lists_by_offset = { @@ -417,10 +407,9 @@ def GetTypeListStringsByOffset(self, offset): @staticmethod def ResolveClassAccessFlags(access_flags): - return tuple( - flag_string - for flag, flag_string in DexFile._CLASS_ACCESS_FLAGS.iteritems() - if flag & access_flags) + return tuple(flag_string + for flag, flag_string in DexFile._CLASS_ACCESS_FLAGS.items() + if flag & access_flags) def IterMethodSignatureParts(self): """Yields the string components of dex methods in a dex file. @@ -453,8 +442,7 @@ def __repr__(self): return '\n'.join(str(item) for item in items) -class _DumpCommand(object): - +class _DumpCommand: def __init__(self, dexfile): self._dexfile = dexfile @@ -463,7 +451,6 @@ def Run(self): class _DumpMethods(_DumpCommand): - def Run(self): for parts in self._dexfile.IterMethodSignatureParts(): class_type, return_type, method_name, parameter_types = parts @@ -472,7 +459,6 @@ def Run(self): class _DumpStrings(_DumpCommand): - def Run(self): for string_item in self._dexfile.string_item_list: # Some strings are likely to be non-ascii (vs. methods/classes). 
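With dex_parser.py now on Python 3 (the MUTF-8 decoder yields str via chr(), and the CLI below opens inputs with 'rb'), programmatic use follows the same pattern. A minimal sketch using only names visible in this diff; the input path is illustrative:

    from pylib.dex import dex_parser

    with open('classes.dex', 'rb') as f:  # DexFile expects raw bytes
      dexfile = dex_parser.DexFile(f.read())

    for class_item in dexfile.class_def_item_list:
      print(dexfile.GetTypeString(class_item.class_idx))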
@@ -480,7 +466,6 @@ def Run(self): class _DumpClasses(_DumpCommand): - def Run(self): for class_item in self._dexfile.class_def_item_list: class_string = self._dexfile.GetTypeString(class_item.class_idx) @@ -493,7 +478,6 @@ def Run(self): class _DumpSummary(_DumpCommand): - def Run(self): print(self._dexfile) @@ -517,14 +501,13 @@ def _DumpDexItems(dexfile_data, name, item): def main(): parser = argparse.ArgumentParser(description='Dump dex contents to stdout.') - parser.add_argument( - 'input', help='Input (.dex, .jar, .zip, .aab, .apk) file path.') - parser.add_argument( - 'item', - choices=('methods', 'strings', 'classes', 'summary'), - help='Item to dump', - nargs='?', - default='summary') + parser.add_argument('input', + help='Input (.dex, .jar, .zip, .aab, .apk) file path.') + parser.add_argument('item', + choices=('methods', 'strings', 'classes', 'summary'), + help='Item to dump', + nargs='?', + default='summary') args = parser.parse_args() if os.path.splitext(args.input)[1] in ('.apk', '.jar', '.zip', '.aab'): @@ -541,7 +524,7 @@ def main(): _DumpDexItems(z.read(path), path, args.item) else: - with open(args.input) as f: + with open(args.input, 'rb') as f: _DumpDexItems(f.read(), args.input, args.item) diff --git a/build/android/pylib/gtest/__init__.py b/build/android/pylib/gtest/__init__.py index 96196cffb272..5ffa28413724 100644 --- a/build/android/pylib/gtest/__init__.py +++ b/build/android/pylib/gtest/__init__.py @@ -1,3 +1,3 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/gtest/filter/unit_tests_disabled b/build/android/pylib/gtest/filter/unit_tests_disabled index 97811c83a4ad..c8564bf3eef0 100644 --- a/build/android/pylib/gtest/filter/unit_tests_disabled +++ b/build/android/pylib/gtest/filter/unit_tests_disabled @@ -47,23 +47,13 @@ ProtocolHandlerRegistryTest.TestOSRegistrationFailure SQLiteServerBoundCertStoreTest.TestUpgradeV1 SQLiteServerBoundCertStoreTest.TestUpgradeV2 -ProfileSyncComponentsFactoryImplTest.* PermissionsTest.GetWarningMessages_Plugins ImageOperations.ResizeShouldAverageColors -# crbug.com/139643 -VariationsUtilTest.DisableAfterInitialization -VariationsUtilTest.AssociateGoogleVariationID -VariationsUtilTest.NoAssociation - # crbug.com/141473 AutofillManagerTest.UpdatePasswordSyncState AutofillManagerTest.UpdatePasswordGenerationState -# crbug.com/145843 -EntropyProviderTest.UseOneTimeRandomizationSHA1 -EntropyProviderTest.UseOneTimeRandomizationPermuted - # crbug.com/147500 ManifestTest.RestrictedKeys diff --git a/build/android/pylib/gtest/gtest_config.py b/build/android/pylib/gtest/gtest_config.py index 3ac195586c11..a7b0a04e656a 100644 --- a/build/android/pylib/gtest/gtest_config.py +++ b/build/android/pylib/gtest/gtest_config.py @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -22,7 +22,6 @@ 'android_webview_unittests', 'base_unittests', 'blink_unittests', - 'breakpad_unittests', 'cc_unittests', 'components_unittests', 'content_browsertests', @@ -48,7 +47,6 @@ # Tests fail in component=shared_library build, which is required for ASan. 
# http://crbug.com/344868 ASAN_EXCLUDED_TEST_SUITES = [ - 'breakpad_unittests', 'sandbox_linux_unittests', # The internal ASAN recipe cannot run step "unit_tests_apk", this is the diff --git a/build/android/pylib/gtest/gtest_test_instance.py b/build/android/pylib/gtest/gtest_test_instance.py index 452e414acf9a..232376945fb4 100644 --- a/build/android/pylib/gtest/gtest_test_instance.py +++ b/build/android/pylib/gtest/gtest_test_instance.py @@ -1,9 +1,7 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import - import json import logging import os @@ -319,7 +317,7 @@ def TestNameWithoutDisabledPrefix(test_name): class GtestTestInstance(test_instance.TestInstance): def __init__(self, args, data_deps_delegate, error_func): - super(GtestTestInstance, self).__init__() + super().__init__() # TODO(jbudorick): Support multiple test suites. if len(args.suite_name) > 1: raise ValueError('Platform mode currently supports only 1 gtest suite') @@ -339,6 +337,7 @@ def __init__(self, args, data_deps_delegate, error_func): self._symbolizer = stack_symbolizer.Symbolizer(None) self._total_external_shards = args.test_launcher_total_shards self._wait_for_java_debugger = args.wait_for_java_debugger + self._use_existing_test_data = args.use_existing_test_data # GYP: if args.executable_dist_dir: @@ -385,7 +384,7 @@ def __init__(self, args, data_deps_delegate, error_func): error_func('Could not find apk or executable for %s' % self._suite) self._data_deps = [] - self._gtest_filter = test_filter.InitializeFilterFromArgs(args) + self._gtest_filters = test_filter.InitializeFiltersFromArgs(args) self._run_disabled = args.run_disabled self._data_deps_delegate = data_deps_delegate @@ -474,8 +473,8 @@ def gs_test_artifacts_bucket(self): return self._gs_test_artifacts_bucket @property - def gtest_filter(self): - return self._gtest_filter + def gtest_filters(self): + return self._gtest_filters @property def isolated_script_test_output(self): @@ -533,6 +532,10 @@ def total_external_shards(self): def wait_for_java_debugger(self): return self._wait_for_java_debugger + @property + def use_existing_test_data(self): + return self._use_existing_test_data + #override def TestType(self): return 'gtest' @@ -570,8 +573,8 @@ def FilterTests(self, test_list, disabled_prefixes=None): """ gtest_filter_strings = [ self._GenerateDisabledFilterString(disabled_prefixes)] - if self._gtest_filter: - gtest_filter_strings.append(self._gtest_filter) + if self._gtest_filters: + gtest_filter_strings.extend(self._gtest_filters) filtered_test_list = test_list # This lock is required because on older versions of Python @@ -582,12 +585,16 @@ def FilterTests(self, test_list, disabled_prefixes=None): filtered_test_list = unittest_util.FilterTestNames( filtered_test_list, gtest_filter_string) - if self._run_disabled and self._gtest_filter: + if self._run_disabled and self._gtest_filters: out_filtered_test_list = list(set(test_list)-set(filtered_test_list)) for test in out_filtered_test_list: test_name_no_disabled = TestNameWithoutDisabledPrefix(test) - if test_name_no_disabled != test and unittest_util.FilterTestNames( - [test_name_no_disabled], self._gtest_filter): + if test_name_no_disabled == test: + continue + if all( + unittest_util.FilterTestNames([test_name_no_disabled], + gtest_filter) + for gtest_filter in self._gtest_filters): filtered_test_list.append(test) 
return filtered_test_list @@ -618,4 +625,3 @@ def _GenerateDisabledFilterString(self, disabled_prefixes): #override def TearDown(self): """Do nothing.""" - pass diff --git a/build/android/pylib/gtest/gtest_test_instance_test.py b/build/android/pylib/gtest/gtest_test_instance_test.py index d5d20045d6aa..eb860295fedb 100755 --- a/build/android/pylib/gtest/gtest_test_instance_test.py +++ b/build/android/pylib/gtest/gtest_test_instance_test.py @@ -1,9 +1,8 @@ -#!/usr/bin/env vpython -# Copyright 2014 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import import unittest from pylib.base import base_test_result @@ -100,10 +99,10 @@ def testParseGTestOutput_pass(self): '[ OK ] FooTest.Bar (1 ms)', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) - self.assertEquals('FooTest.Bar', actual[0].GetName()) - self.assertEquals(1, actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType()) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.PASS, actual[0].GetType()) def testParseGTestOutput_fail(self): raw_output = [ @@ -111,10 +110,10 @@ def testParseGTestOutput_fail(self): '[ FAILED ] FooTest.Bar (1 ms)', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) - self.assertEquals('FooTest.Bar', actual[0].GetName()) - self.assertEquals(1, actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType()) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) def testParseGTestOutput_crash(self): raw_output = [ @@ -122,10 +121,10 @@ def testParseGTestOutput_crash(self): '[ CRASHED ] FooTest.Bar (1 ms)', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) - self.assertEquals('FooTest.Bar', actual[0].GetName()) - self.assertEquals(1, actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType()) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) def testParseGTestOutput_errorCrash(self): raw_output = [ @@ -133,10 +132,10 @@ def testParseGTestOutput_errorCrash(self): '[ERROR:blah] Currently running: FooTest.Bar', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) - self.assertEquals('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) self.assertIsNone(actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType()) + self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) def testParseGTestOutput_fatalDcheck(self): raw_output = [ @@ -144,20 +143,20 @@ def testParseGTestOutput_fatalDcheck(self): '[0324/183029.116334:FATAL:test_timeouts.cc(103)] Check failed: !init', ] actual = 
gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) - self.assertEquals('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) self.assertIsNone(actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType()) + self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) def testParseGTestOutput_unknown(self): raw_output = [ '[ RUN ] FooTest.Bar', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) - self.assertEquals('FooTest.Bar', actual[0].GetName()) - self.assertEquals(0, actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType()) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(0, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) def testParseGTestOutput_nonterminalUnknown(self): raw_output = [ @@ -166,15 +165,15 @@ def testParseGTestOutput_nonterminalUnknown(self): '[ OK ] FooTest.Baz (1 ms)', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(2, len(actual)) + self.assertEqual(2, len(actual)) - self.assertEquals('FooTest.Bar', actual[0].GetName()) - self.assertEquals(0, actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType()) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(0, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) - self.assertEquals('FooTest.Baz', actual[1].GetName()) - self.assertEquals(1, actual[1].GetDuration()) - self.assertEquals(base_test_result.ResultType.PASS, actual[1].GetType()) + self.assertEqual('FooTest.Baz', actual[1].GetName()) + self.assertEqual(1, actual[1].GetDuration()) + self.assertEqual(base_test_result.ResultType.PASS, actual[1].GetType()) def testParseGTestOutput_deathTestCrashOk(self): raw_output = [ @@ -183,11 +182,11 @@ def testParseGTestOutput_deathTestCrashOk(self): '[ OK ] FooTest.Bar (1 ms)', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) + self.assertEqual(1, len(actual)) - self.assertEquals('FooTest.Bar', actual[0].GetName()) - self.assertEquals(1, actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType()) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.PASS, actual[0].GetType()) def testParseGTestOutput_typeParameterized(self): raw_output = [ @@ -195,10 +194,10 @@ def testParseGTestOutput_typeParameterized(self): '[ FAILED ] Baz/FooTest.Bar/0, where TypeParam = (1 ms)', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) - self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName()) - self.assertEquals(1, actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType()) + self.assertEqual(1, len(actual)) + self.assertEqual('Baz/FooTest.Bar/0', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) def testParseGTestOutput_valueParameterized(self): raw_output = [ @@ -207,10 +206,10 @@ def testParseGTestOutput_valueParameterized(self): ' where 
GetParam() = 4-byte object <00-00 00-00> (1 ms)', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) - self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName()) - self.assertEquals(1, actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType()) + self.assertEqual(1, len(actual)) + self.assertEqual('Baz/FooTest.Bar/0', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) def testParseGTestOutput_typeAndValueParameterized(self): raw_output = [ @@ -219,10 +218,10 @@ def testParseGTestOutput_typeAndValueParameterized(self): ' where TypeParam = and GetParam() = (1 ms)', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) - self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName()) - self.assertEquals(1, actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType()) + self.assertEqual(1, len(actual)) + self.assertEqual('Baz/FooTest.Bar/0', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) def testParseGTestOutput_skippedTest(self): raw_output = [ @@ -230,18 +229,18 @@ def testParseGTestOutput_skippedTest(self): '[ SKIPPED ] FooTest.Bar (1 ms)', ] actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) - self.assertEquals(1, len(actual)) - self.assertEquals('FooTest.Bar', actual[0].GetName()) - self.assertEquals(1, actual[0].GetDuration()) - self.assertEquals(base_test_result.ResultType.SKIP, actual[0].GetType()) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.SKIP, actual[0].GetType()) def testParseGTestXML_none(self): actual = gtest_test_instance.ParseGTestXML(None) - self.assertEquals([], actual) + self.assertEqual([], actual) def testParseGTestJSON_none(self): actual = gtest_test_instance.ParseGTestJSON(None) - self.assertEquals([], actual) + self.assertEqual([], actual) def testParseGTestJSON_example(self): raw_json = """ @@ -276,10 +275,41 @@ def testParseGTestJSON_example(self): } }""" actual = gtest_test_instance.ParseGTestJSON(raw_json) - self.assertEquals(1, len(actual)) - self.assertEquals('mojom_tests.parse.ast_unittest.ASTTest.testNodeBase', - actual[0].GetName()) - self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType()) + self.assertEqual(1, len(actual)) + self.assertEqual('mojom_tests.parse.ast_unittest.ASTTest.testNodeBase', + actual[0].GetName()) + self.assertEqual(base_test_result.ResultType.PASS, actual[0].GetType()) + + def testParseGTestJSON_skippedTest_example(self): + raw_json = """ + { + "tests": { + "mojom_tests": { + "parse": { + "ast_unittest": { + "ASTTest": { + "testNodeBase": { + "expected": "SKIP", + "actual": "SKIP" + } + } + } + } + } + }, + "interrupted": false, + "path_delimiter": ".", + "version": 3, + "seconds_since_epoch": 1406662283.764424, + "num_failures_by_type": { + "SKIP": 1 + } + }""" + actual = gtest_test_instance.ParseGTestJSON(raw_json) + self.assertEqual(1, len(actual)) + self.assertEqual('mojom_tests.parse.ast_unittest.ASTTest.testNodeBase', + actual[0].GetName()) + self.assertEqual(base_test_result.ResultType.SKIP, actual[0].GetType()) def testParseGTestJSON_skippedTest_example(self): raw_json = """ @@ -322,7 +352,7 
@@ def testTestNameWithoutDisabledPrefix_disabled(self): actual = gtest_test_instance \ .TestNameWithoutDisabledPrefix(test_name) expected = 'A.B' - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) def testTestNameWithoutDisabledPrefix_flaky(self): test_name_list = [ @@ -334,14 +364,14 @@ def testTestNameWithoutDisabledPrefix_flaky(self): actual = gtest_test_instance \ .TestNameWithoutDisabledPrefix(test_name) expected = 'A.B' - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) def testTestNameWithoutDisabledPrefix_notDisabledOrFlaky(self): test_name = 'A.B' actual = gtest_test_instance \ .TestNameWithoutDisabledPrefix(test_name) expected = 'A.B' - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) if __name__ == '__main__': diff --git a/build/android/pylib/instrumentation/__init__.py b/build/android/pylib/instrumentation/__init__.py index 96196cffb272..5ffa28413724 100644 --- a/build/android/pylib/instrumentation/__init__.py +++ b/build/android/pylib/instrumentation/__init__.py @@ -1,3 +1,3 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/instrumentation/instrumentation_parser.py b/build/android/pylib/instrumentation/instrumentation_parser.py index fd875cb12074..700d2415e9c6 100644 --- a/build/android/pylib/instrumentation/instrumentation_parser.py +++ b/build/android/pylib/instrumentation/instrumentation_parser.py @@ -1,8 +1,8 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import + import logging import re @@ -34,7 +34,7 @@ _INSTR_LINE_RE = re.compile(r'^\s*INSTRUMENTATION_([A-Z_]+): (.*)$') -class InstrumentationParser(object): +class InstrumentationParser: def __init__(self, stream): """An incremental parser for the output of Android instrumentation tests. diff --git a/build/android/pylib/instrumentation/instrumentation_parser_test.py b/build/android/pylib/instrumentation/instrumentation_parser_test.py index 00e4b9103ec5..dccb58a3fe03 100755 --- a/build/android/pylib/instrumentation/instrumentation_parser_test.py +++ b/build/android/pylib/instrumentation/instrumentation_parser_test.py @@ -1,12 +1,12 @@ -#!/usr/bin/env vpython -# Copyright 2015 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Unit tests for instrumentation.InstrumentationParser.""" -from __future__ import absolute_import + import unittest from pylib.instrumentation import instrumentation_parser diff --git a/build/android/pylib/instrumentation/instrumentation_test_instance.py b/build/android/pylib/instrumentation/instrumentation_test_instance.py index aba3663ba627..f520879a60f9 100644 --- a/build/android/pylib/instrumentation/instrumentation_test_instance.py +++ b/build/android/pylib/instrumentation/instrumentation_test_instance.py @@ -1,8 +1,8 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import absolute_import + import copy import logging import os @@ -22,8 +22,6 @@ from pylib.symbols import stack_symbolizer from pylib.utils import dexdump from pylib.utils import gold_utils -from pylib.utils import instrumentation_tracing -from pylib.utils import proguard from pylib.utils import shared_preference_utils from pylib.utils import test_filter @@ -38,9 +36,11 @@ _COMMAND_LINE_PARAMETER = 'cmdlinearg-parameter' _DEFAULT_ANNOTATIONS = [ 'SmallTest', 'MediumTest', 'LargeTest', 'EnormousTest', 'IntegrationTest'] +# This annotation is for disabled tests that should not be run in Test Reviver. +_DO_NOT_REVIVE_ANNOTATIONS = ['DoNotRevive', 'Manual'] _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS = [ 'DisabledTest', 'FlakyTest', 'Manual'] -_VALID_ANNOTATIONS = set(_DEFAULT_ANNOTATIONS + +_VALID_ANNOTATIONS = set(_DEFAULT_ANNOTATIONS + _DO_NOT_REVIVE_ANNOTATIONS + _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS) _TEST_LIST_JUNIT4_RUNNERS = [ @@ -72,15 +72,14 @@ class MissingSizeAnnotationError(test_exception.TestException): def __init__(self, class_name): - super(MissingSizeAnnotationError, self).__init__(class_name + + super().__init__( + class_name + ': Test method is missing required size annotation. Add one of: ' + ', '.join('@' + a for a in _VALID_ANNOTATIONS)) class CommandLineParameterizationException(test_exception.TestException): - - def __init__(self, msg): - super(CommandLineParameterizationException, self).__init__(msg) + pass class TestListPickleException(test_exception.TestException): @@ -205,15 +204,36 @@ def GenerateTestResults(result_code, result_bundle, statuses, duration_ms, def _MaybeSetLog(bundle, current_result, symbolizer, device_abi): if _BUNDLE_STACK_ID in bundle: + stack = bundle[_BUNDLE_STACK_ID] if symbolizer and device_abi: - current_result.SetLog('%s\n%s' % (bundle[_BUNDLE_STACK_ID], '\n'.join( - symbolizer.ExtractAndResolveNativeStackTraces( - bundle[_BUNDLE_STACK_ID], device_abi)))) + current_result.SetLog('%s\n%s' % (stack, '\n'.join( + symbolizer.ExtractAndResolveNativeStackTraces(stack, device_abi)))) else: - current_result.SetLog(bundle[_BUNDLE_STACK_ID]) + current_result.SetLog(stack) + + current_result.SetFailureReason(_ParseExceptionMessage(stack)) -def FilterTests(tests, filter_str=None, annotations=None, +def _ParseExceptionMessage(stack): + """Extracts the exception message from the given stack trace. + """ + # This interprets stack traces reported via InstrumentationResultPrinter: + # https://source.chromium.org/chromium/chromium/src/+/main:third_party/android_support_test_runner/runner/src/main/java/android/support/test/internal/runner/listener/InstrumentationResultPrinter.java;l=181?q=InstrumentationResultPrinter&type=cs + # This is a standard Java stack trace, of the form: + # <Result of Exception.toString()> + # at SomeClass.SomeMethod(...) + # at ... + lines = stack.split('\n') + for i, line in enumerate(lines): + if line.startswith('\tat'): + return '\n'.join(lines[0:i]) + # No call stack found, so assume everything is the exception message. + return stack + + +def FilterTests(tests, + filter_strs=None, + annotations=None, excluded_annotations=None): """Filter a list of tests Args: tests: a list of tests. e.g. [ {'annotations": {}, 'class': 'com.example.TestA', 'method':'test1'}, {'annotations": {}, 'class': 'com.example.TestB', 'method':'test2'}] - filter_str: googletest-style filter string. + filter_strs: a list of googletest-style filter strings.
annotations: a dict of wanted annotations for test methods. - exclude_annotations: a dict of annotations to exclude. + excluded_annotations: a dict of annotations to exclude. Return: A list of filtered tests """ - def gtest_filter(t): - if not filter_str: - return True + + def test_names_from_pattern(combined_pattern, test_names): + patterns = combined_pattern.split(':') + + hashable_patterns = set() + filename_patterns = [] + for pattern in patterns: + if ('*' in pattern or '?' in pattern or '[' in pattern): + filename_patterns.append(pattern) + else: + hashable_patterns.add(pattern) + + filter_test_names = set( + unittest_util.FilterTestNames(test_names, ':'.join( + filename_patterns))) if len(filename_patterns) > 0 else set() + + for test_name in test_names: + if test_name in hashable_patterns: + filter_test_names.add(test_name) + + return filter_test_names + + def get_test_names(test): + test_names = set() # Allow fully-qualified name as well as an omitted package. unqualified_class_test = { - 'class': t['class'].split('.')[-1], - 'method': t['method'] + 'class': test['class'].split('.')[-1], + 'method': test['method'] } - names = [ - GetTestName(t, sep='.'), - GetTestName(unqualified_class_test, sep='.'), - GetUniqueTestName(t, sep='.') - ] - - if t['is_junit4']: - names += [ - GetTestNameWithoutParameterPostfix(t, sep='.'), - GetTestNameWithoutParameterPostfix(unqualified_class_test, sep='.') - ] - pattern_groups = filter_str.split('-') - if len(pattern_groups) > 1: - negative_filter = pattern_groups[1] - if unittest_util.FilterTestNames(names, negative_filter): - return [] + test_name = GetTestName(test, sep='.') + test_names.add(test_name) + + unqualified_class_test_name = GetTestName(unqualified_class_test, sep='.') + test_names.add(unqualified_class_test_name) - positive_filter = pattern_groups[0] - return unittest_util.FilterTestNames(names, positive_filter) + unique_test_name = GetUniqueTestName(test, sep='.') + test_names.add(unique_test_name) + + if test['is_junit4']: + junit4_test_name = GetTestNameWithoutParameterPostfix(test, sep='.') + test_names.add(junit4_test_name) + + unqualified_junit4_test_name = \ + GetTestNameWithoutParameterPostfix(unqualified_class_test, sep='.') + test_names.add(unqualified_junit4_test_name) + return test_names + + def get_tests_from_names(tests, test_names, tests_to_names): + ''' Returns the tests for which the given names apply + + Args: + tests: a list of tests. e.g. [ + {'annotations": {}, 'class': 'com.example.TestA', 'method':'test1'}, + {'annotations": {}, 'class': 'com.example.TestB', 'method':'test2'}] + test_names: a collection of names determining tests to return. + + Return: + A list of tests that match the given test names + ''' + filtered_tests = [] + for t in tests: + current_test_names = tests_to_names[id(t)] + + for current_test_name in current_test_names: + if current_test_name in test_names: + filtered_tests.append(t) + break + + return filtered_tests + + def remove_tests_from_names(tests, remove_test_names, tests_to_names): + ''' Returns the tests from the given list with given names removed + + Args: + tests: a list of tests. e.g. [ + {'annotations": {}, 'class': 'com.example.TestA', 'method':'test1'}, + {'annotations": {}, 'class': 'com.example.TestB', 'method':'test2'}] + remove_test_names: a collection of names determining tests to remove. 
+ tests_to_names: a dictionary of test ids to a collection of applicable + names for that test + + Return: + A list of tests that don't match the given test names + ''' + filtered_tests = [] + + for t in tests: + for name in tests_to_names[id(t)]: + if name in remove_test_names: + break + else: + filtered_tests.append(t) + return filtered_tests + + def gtests_filter(tests, combined_filters): + ''' Returns the tests after the combined_filters have been applied + + Args: + tests: a list of tests. e.g. [ + {'annotations": {}, 'class': 'com.example.TestA', 'method':'test1'}, + {'annotations": {}, 'class': 'com.example.TestB', 'method':'test2'}] + combined_filters: a list of gtest-style filter strings to apply to the + test names + + Return: + A list of tests that should still be included after the combined_filters + are applied to their names + ''' + + if not combined_filters: + return tests + + # Collect all test names + all_test_names = set() + tests_to_names = {} + for t in tests: + tests_to_names[id(t)] = get_test_names(t) + for name in tests_to_names[id(t)]: + all_test_names.add(name) + + for combined_filter in combined_filters: + pattern_groups = combined_filter.split('-') + negative_pattern = pattern_groups[1] if len(pattern_groups) > 1 else None + positive_pattern = pattern_groups[0] + if positive_pattern: + # Only use the test names that match the positive pattern + positive_test_names = test_names_from_pattern(positive_pattern, + all_test_names) + tests = get_tests_from_names(tests, positive_test_names, tests_to_names) + + if negative_pattern: + # Remove any test the negative filter matches + remove_names = test_names_from_pattern(negative_pattern, all_test_names) + tests = remove_tests_from_names(tests, remove_names, tests_to_names) + + return tests def annotation_filter(all_annotations): if not annotations: @@ -277,24 +405,19 @@ def any_annotation_matches(filter_annotations, all_annotations): def annotation_value_matches(filter_av, av): if filter_av is None: return True - elif isinstance(av, dict): + if isinstance(av, dict): tav_from_dict = av['value'] # If tav_from_dict is an int, the 'in' operator breaks, so convert # filter_av and manually compare. See https://crbug.com/1019707 if isinstance(tav_from_dict, int): return int(filter_av) == tav_from_dict - else: - return filter_av in tav_from_dict - elif isinstance(av, list): + return filter_av in tav_from_dict + if isinstance(av, list): return filter_av in av return filter_av == av - filtered_tests = [] - for t in tests: - # Gtest filtering - if not gtest_filter(t): - continue - + return_tests = [] + for t in gtests_filter(tests, filter_strs): # Enforce that all tests declare their size.
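    # (A test with none of _VALID_ANNOTATIONS raises MissingSizeAnnotationError
    # rather than being silently skipped.)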
if not any(a in _VALID_ANNOTATIONS for a in t['annotations']): raise MissingSizeAnnotationError(GetTestName(t)) @@ -302,23 +425,9 @@ def annotation_value_matches(filter_av, av): if (not annotation_filter(t['annotations']) or not excluded_annotation_filter(t['annotations'])): continue + return_tests.append(t) - filtered_tests.append(t) - - return filtered_tests - - -# TODO(yolandyan): remove this once the tests are converted to junit4 -def GetAllTestsFromJar(test_jar): - pickle_path = '%s-proguard.pickle' % test_jar - try: - tests = GetTestsFromPickle(pickle_path, os.path.getmtime(test_jar)) - except TestListPickleException as e: - logging.info('Could not get tests from pickle: %s', e) - logging.info('Getting tests from JAR via proguard.') - tests = _GetTestsFromProguard(test_jar) - SaveTestsToPickle(pickle_path, tests) - return tests + return return_tests def GetAllTestsFromApk(test_apk): @@ -332,74 +441,59 @@ def GetAllTestsFromApk(test_apk): SaveTestsToPickle(pickle_path, tests) return tests + def GetTestsFromPickle(pickle_path, test_mtime): if not os.path.exists(pickle_path): raise TestListPickleException('%s does not exist.' % pickle_path) if os.path.getmtime(pickle_path) <= test_mtime: raise TestListPickleException('File is stale: %s' % pickle_path) - with open(pickle_path, 'r') as f: + with open(pickle_path, 'rb') as f: pickle_data = pickle.load(f) if pickle_data['VERSION'] != _PICKLE_FORMAT_VERSION: raise TestListPickleException('PICKLE_FORMAT_VERSION has changed.') return pickle_data['TEST_METHODS'] -# TODO(yolandyan): remove this once the test listing from java runner lands -@instrumentation_tracing.no_tracing -def _GetTestsFromProguard(jar_path): - p = proguard.Dump(jar_path) - class_lookup = dict((c['class'], c) for c in p['classes']) - - def is_test_class(c): - return c['class'].endswith('Test') +def _GetTestsFromDexdump(test_apk): + dex_dumps = dexdump.Dump(test_apk) + tests = [] - def is_test_method(m): - return m['method'].startswith('test') + def get_test_methods(methods, annotations): + test_methods = [] - def recursive_class_annotations(c): - s = c['superclass'] - if s in class_lookup: - a = recursive_class_annotations(class_lookup[s]) - else: - a = {} - a.update(c['annotations']) - return a - - def stripped_test_class(c): - return { - 'class': c['class'], - 'annotations': recursive_class_annotations(c), - 'methods': [m for m in c['methods'] if is_test_method(m)], - 'superclass': c['superclass'], - } + for method in methods: + if method.startswith('test'): + method_annotations = annotations.get(method, {}) - return [stripped_test_class(c) for c in p['classes'] - if is_test_class(c)] + # Dexdump used to not return any annotation info, so the MediumTest + # annotation was added to all methods. Preserve that behaviour by + # adding MediumTest when none of the size annotations are present. + if not any(valid in method_annotations for valid in _VALID_ANNOTATIONS): + method_annotations.update({'MediumTest': None}) + test_methods.append({ + 'method': method, + 'annotations': method_annotations + }) -def _GetTestsFromDexdump(test_apk): - dex_dumps = dexdump.Dump(test_apk) - tests = [] - - def get_test_methods(methods): - return [ - { - 'method': m, - # No annotation info is available from dexdump. - # Set MediumTest annotation for default.
- 'annotations': {'MediumTest': None}, - } for m in methods if m.startswith('test')] + return test_methods for dump in dex_dumps: for package_name, package_info in six.iteritems(dump): for class_name, class_info in six.iteritems(package_info['classes']): - if class_name.endswith('Test'): + if class_name.endswith('Test') and not class_info['is_abstract']: + classAnnotations, methodsAnnotations = class_info['annotations'] tests.append({ - 'class': '%s.%s' % (package_name, class_name), - 'annotations': {}, - 'methods': get_test_methods(class_info['methods']), - 'superclass': class_info['superclass'], + 'class': + '%s.%s' % (package_name, class_name), + 'annotations': + classAnnotations, + 'methods': + get_test_methods(class_info['methods'], methodsAnnotations), + 'superclass': + class_info['superclass'], }) return tests @@ -408,7 +502,7 @@ def SaveTestsToPickle(pickle_path, tests): 'VERSION': _PICKLE_FORMAT_VERSION, 'TEST_METHODS': tests, } - with open(pickle_path, 'w') as pickle_file: + with open(pickle_path, 'wb') as pickle_file: pickle.dump(pickle_data, pickle_file) @@ -416,7 +510,7 @@ class MissingJUnit4RunnerException(test_exception.TestException): """Raised when JUnit4 runner is not provided or specified in apk manifest""" def __init__(self): - super(MissingJUnit4RunnerException, self).__init__( + super().__init__( 'JUnit4 runner is not provided or specified in test apk manifest.') @@ -488,9 +582,12 @@ def GetUniqueTestName(test, sep='#'): class InstrumentationTestInstance(test_instance.TestInstance): def __init__(self, args, data_deps_delegate, error_func): - super(InstrumentationTestInstance, self).__init__() + super().__init__() self._additional_apks = [] + self._additional_apexs = [] + self._forced_queryable_additional_apks = [] + self._instant_additional_apks = [] self._apk_under_test = None self._apk_under_test_incremental_install_json = None self._modules = None @@ -499,8 +596,8 @@ def __init__(self, args, data_deps_delegate, error_func): self._package_info = None self._suite = None self._test_apk = None + self._test_apk_as_instant = False self._test_apk_incremental_install_json = None - self._test_jar = None self._test_package = None self._junit3_runner_class = None self._junit4_runner_class = None @@ -511,13 +608,18 @@ def __init__(self, args, data_deps_delegate, error_func): self._data_deps = None self._data_deps_delegate = None self._runtime_deps_path = None + self._store_data_in_app_directory = False self._initializeDataDependencyAttributes(args, data_deps_delegate) self._annotations = None self._excluded_annotations = None - self._test_filter = None + self._test_filters = None self._initializeTestFilterAttributes(args) + self._run_setup_commands = [] + self._run_teardown_commands = [] + self._initializeSetupTeardownCommandAttributes(args) + self._flags = None self._use_apk_under_test_flags_file = False self._initializeFlagAttributes(args) @@ -528,12 +630,12 @@ def __init__(self, args, data_deps_delegate, error_func): self._initializeTestControlAttributes(args) self._coverage_directory = None - self._jacoco_coverage_type = None self._initializeTestCoverageAttributes(args) self._store_tombstones = False self._symbolizer = None - self._enable_java_deobfuscation = False + self._enable_breakpad_dump = False + self._proguard_mapping_path = None self._deobfuscator = None self._initializeLogAttributes(args) @@ -546,17 +648,30 @@ def __init__(self, args, data_deps_delegate, error_func): self._system_packages_to_remove = None self._initializeSystemPackagesToRemoveAttributes(args) + 
self._use_voice_interaction_service = None + self._initializeUseVoiceInteractionService(args) + self._use_webview_provider = None self._initializeUseWebviewProviderAttributes(args) self._skia_gold_properties = None self._initializeSkiaGoldAttributes(args) + self._test_launcher_batch_limit = None + self._initializeTestLauncherAttributes(args) + + self._approve_app_links_domain = None + self._approve_app_links_package = None + self._initializeApproveAppLinksAttributes(args) + self._wpr_enable_record = args.wpr_enable_record self._external_shard_index = args.test_launcher_shard_index self._total_external_shards = args.test_launcher_total_shards + self._is_unit_test = False + self._initializeUnitTestFlag(args) + def _initializeApkAttributes(self, args, error_func): if args.apk_under_test: apk_under_test_path = args.apk_under_test @@ -590,6 +705,8 @@ def _initializeApkAttributes(self, args, error_func): self._test_apk = apk_helper.ToHelper(test_apk_path) self._suite = os.path.splitext(os.path.basename(args.test_apk))[0] + self._test_apk_as_instant = args.test_apk_as_instant + self._apk_under_test_incremental_install_json = ( args.apk_under_test_incremental_install_json) self._test_apk_incremental_install_json = ( @@ -603,18 +720,10 @@ def _initializeApkAttributes(self, args, error_func): self._fake_modules = args.fake_modules self._additional_locales = args.additional_locales - self._test_jar = args.test_jar self._test_support_apk = apk_helper.ToHelper(os.path.join( constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR, '%sSupport.apk' % self._suite)) - if not self._test_jar: - logging.warning('Test jar not specified. Test runner will not have ' - 'Java annotation info available. May not handle test ' - 'timeouts correctly.') - elif not os.path.exists(self._test_jar): - error_func('Unable to find test JAR: %s' % self._test_jar) - self._test_package = self._test_apk.GetPackageName() all_instrumentations = self._test_apk.GetAllInstrumentations() all_junit3_runner_classes = [ @@ -655,27 +764,37 @@ def _initializeApkAttributes(self, args, error_func): self._package_info = package_info break if not self._package_info: - logging.warning(("Unable to find package info for %s. " + - "(This may just mean that the test package is " + - "currently being installed.)"), - self._test_package) + logging.warning( + 'Unable to find package info for %s. 
' + '(This may just mean that the test package is ' + 'currently being installed.)', self._test_package) + + for x in set(args.additional_apks + args.forced_queryable_additional_apks + + args.instant_additional_apks): + if not os.path.exists(x): + error_func('Unable to find additional APK: %s' % x) + + apk = apk_helper.ToHelper(x) + self._additional_apks.append(apk) - for apk in args.additional_apks: - if not os.path.exists(apk): - error_func('Unable to find additional APK: %s' % apk) - self._additional_apks = ( - [apk_helper.ToHelper(x) for x in args.additional_apks]) + if x in args.forced_queryable_additional_apks: + self._forced_queryable_additional_apks.append(apk) + + if x in args.instant_additional_apks: + self._instant_additional_apks.append(apk) + + self._additional_apexs = args.additional_apexs def _initializeDataDependencyAttributes(self, args, data_deps_delegate): self._data_deps = [] self._data_deps_delegate = data_deps_delegate self._runtime_deps_path = args.runtime_deps_path - + self._store_data_in_app_directory = args.store_data_in_app_directory if not self._runtime_deps_path: logging.warning('No data dependencies will be pushed.') def _initializeTestFilterAttributes(self, args): - self._test_filter = test_filter.InitializeFilterFromArgs(args) + self._test_filters = test_filter.InitializeFiltersFromArgs(args) def annotation_element(a): a = a.split('=', 1) @@ -684,7 +803,7 @@ def annotation_element(a): if args.annotation_str: self._annotations = [ annotation_element(a) for a in args.annotation_str.split(',')] - elif not self._test_filter: + elif not self._test_filters: self._annotations = [ annotation_element(a) for a in _DEFAULT_ANNOTATIONS] else: @@ -697,11 +816,19 @@ def annotation_element(a): self._excluded_annotations = [] requested_annotations = set(a[0] for a in self._annotations) - if not args.run_disabled: + if args.run_disabled: + self._excluded_annotations.extend( + annotation_element(a) for a in _DO_NOT_REVIVE_ANNOTATIONS + if a not in requested_annotations) + else: self._excluded_annotations.extend( annotation_element(a) for a in _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS if a not in requested_annotations) + def _initializeSetupTeardownCommandAttributes(self, args): + self._run_setup_commands = args.run_setup_commands + self._run_teardown_commands = args.run_teardown_commands + def _initializeFlagAttributes(self, args): self._use_apk_under_test_flags_file = args.use_apk_under_test_flags_file self._flags = ['--enable-test-intents'] @@ -724,15 +851,10 @@ def _initializeTestControlAttributes(self, args): def _initializeTestCoverageAttributes(self, args): self._coverage_directory = args.coverage_dir - if ("Batch", "UnitTests") in self._annotations and ( - "Batch", "UnitTests") not in self._excluded_annotations: - self._jacoco_coverage_type = "unit_tests_only" - elif ("Batch", "UnitTests") not in self._annotations and ( - "Batch", "UnitTests") in self._excluded_annotations: - self._jacoco_coverage_type = "unit_tests_excluded" def _initializeLogAttributes(self, args): - self._enable_java_deobfuscation = args.enable_java_deobfuscation + self._enable_breakpad_dump = args.enable_breakpad_dump + self._proguard_mapping_path = args.proguard_mapping_path self._store_tombstones = args.store_tombstones self._symbolizer = stack_symbolizer.Symbolizer( self.apk_under_test.path if self.apk_under_test else None) @@ -758,6 +880,12 @@ def _initializeSystemPackagesToRemoveAttributes(self, args): return self._system_packages_to_remove = args.system_packages_to_remove + def 
_initializeUseVoiceInteractionService(self, args): + if (not hasattr(args, 'use_voice_interaction_service') + or not args.use_voice_interaction_service): + return + self._use_voice_interaction_service = args.use_voice_interaction_service + def _initializeUseWebviewProviderAttributes(self, args): if (not hasattr(args, 'use_webview_provider') or not args.use_webview_provider): @@ -767,10 +895,35 @@ def _initializeUseWebviewProviderAttributes(self, args): def _initializeSkiaGoldAttributes(self, args): self._skia_gold_properties = gold_utils.AndroidSkiaGoldProperties(args) + def _initializeTestLauncherAttributes(self, args): + if hasattr(args, 'test_launcher_batch_limit'): + self._test_launcher_batch_limit = args.test_launcher_batch_limit + + def _initializeApproveAppLinksAttributes(self, args): + if (not hasattr(args, 'approve_app_links') or not args.approve_app_links): + return + + # The argument will be formatted as com.android.thing:www.example.com . + app_links = args.approve_app_links.split(':') + + if (len(app_links) != 2 or not app_links[0] or not app_links[1]): + logging.warning('--approve_app_links option provided, but malformed.') + return + + self._approve_app_links_package = app_links[0] + self._approve_app_links_domain = app_links[1] + + def _initializeUnitTestFlag(self, args): + self._is_unit_test = args.is_unit_test + @property def additional_apks(self): return self._additional_apks + @property + def additional_apexs(self): + return self._additional_apexs + @property def apk_under_test(self): return self._apk_under_test @@ -779,6 +932,14 @@ def apk_under_test(self): def apk_under_test_incremental_install_json(self): return self._apk_under_test_incremental_install_json + @property + def approve_app_links_package(self): + return self._approve_app_links_package + + @property + def approve_app_links_domain(self): + return self._approve_app_links_domain + @property def modules(self): return self._modules @@ -799,6 +960,10 @@ def coverage_directory(self): def edit_shared_prefs(self): return self._edit_shared_prefs + @property + def enable_breakpad_dump(self): + return self._enable_breakpad_dump + @property def external_shard_index(self): return self._external_shard_index @@ -808,8 +973,8 @@ def flags(self): return self._flags @property - def jacoco_coverage_type(self): - return self._jacoco_coverage_type + def is_unit_test(self): + return self._is_unit_test @property def junit3_runner_class(self): @@ -831,6 +996,18 @@ def package_info(self): def replace_system_package(self): return self._replace_system_package + @property + def run_setup_commands(self): + return self._run_setup_commands + + @property + def run_teardown_commands(self): + return self._run_teardown_commands + + @property + def use_voice_interaction_service(self): + return self._use_voice_interaction_service + @property def use_webview_provider(self): return self._use_webview_provider @@ -843,6 +1020,10 @@ def screenshot_dir(self): def skia_gold_properties(self): return self._skia_gold_properties + @property + def store_data_in_app_directory(self): + return self._store_data_in_app_directory + @property def store_tombstones(self): return self._store_tombstones @@ -863,13 +1044,21 @@ def system_packages_to_remove(self): def test_apk(self): return self._test_apk + @property + def test_apk_as_instant(self): + return self._test_apk_as_instant + @property def test_apk_incremental_install_json(self): return self._test_apk_incremental_install_json @property - def test_jar(self): - return self._test_jar + def test_filters(self): 
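+    # Plural on purpose: one gtest-style filter string per filter argument;
+    # FilterTests applies them in sequence, so they combine as an intersection.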
+ return self._test_filters + + @property + def test_launcher_batch_limit(self): + return self._test_launcher_batch_limit @property def test_support_apk(self): @@ -923,18 +1112,20 @@ def GetPreferredAbis(self): def SetUp(self): self._data_deps.extend( self._data_deps_delegate(self._runtime_deps_path)) - if self._enable_java_deobfuscation: + if self._proguard_mapping_path: self._deobfuscator = deobfuscator.DeobfuscatorPool( - self.test_apk.path + '.mapping') + self._proguard_mapping_path) def GetDataDependencies(self): return self._data_deps def GetTests(self): - if self.test_jar: - raw_tests = GetAllTestsFromJar(self.test_jar) - else: - raw_tests = GetAllTestsFromApk(self.test_apk.path) + if self._test_apk_incremental_install_json: + # Would likely just be a matter of calling GetAllTestsFromApk on all + # .dex files listed in the .json. + raise Exception('Support not implemented for incremental_install=true on ' + 'tests that do not use //base\'s test runner.') + raw_tests = GetAllTestsFromApk(self.test_apk.path) return self.ProcessRawTests(raw_tests) def MaybeDeobfuscateLines(self, lines): @@ -948,15 +1139,20 @@ def ProcessRawTests(self, raw_tests): if self._junit4_runner_class is None and any( t['is_junit4'] for t in inflated_tests): raise MissingJUnit4RunnerException() - filtered_tests = FilterTests( - inflated_tests, self._test_filter, self._annotations, - self._excluded_annotations) - if self._test_filter and not filtered_tests: + filtered_tests = FilterTests(inflated_tests, self._test_filters, + self._annotations, self._excluded_annotations) + if self._test_filters and not filtered_tests: for t in inflated_tests: logging.debug(' %s', GetUniqueTestName(t)) - logging.warning('Unmatched Filter: %s', self._test_filter) + logging.warning('Unmatched Filters: %s', self._test_filters) return filtered_tests + def IsApkForceQueryable(self, apk): + return apk in self._forced_queryable_additional_apks + + def IsApkInstant(self, apk): + return apk in self._instant_additional_apks + # pylint: disable=no-self-use def _InflateTests(self, tests): inflated_tests = [] @@ -991,14 +1187,13 @@ def _switchesToFlags(switches): def _annotationToSwitches(clazz, methods): if clazz == _PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES: return [methods['value']] - elif clazz == _PARAMETERIZED_COMMAND_LINE_FLAGS: + if clazz == _PARAMETERIZED_COMMAND_LINE_FLAGS: list_of_switches = [] for annotation in methods['value']: - for clazz, methods in six.iteritems(annotation): - list_of_switches += _annotationToSwitches(clazz, methods) + for c, m in six.iteritems(annotation): + list_of_switches += _annotationToSwitches(c, m) return list_of_switches - else: - return [] + return [] def _setTestFlags(test, flags): if flags: diff --git a/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/build/android/pylib/instrumentation/instrumentation_test_instance_test.py index 3f382518e17a..945c404d2b84 100755 --- a/build/android/pylib/instrumentation/instrumentation_test_instance_test.py +++ b/build/android/pylib/instrumentation/instrumentation_test_instance_test.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2014 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
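The tests below exercise the plural-filter semantics introduced above: each entry in _test_filters is a complete googletest-style filter of the form positive_patterns[-negative_patterns], with ':'-separated patterns inside each part, and successive filters intersect. A minimal standalone sketch of that combination logic, using only the standard library (the names here are illustrative, not the runner's own):

  import fnmatch

  def apply_gtest_filters(names, filters):
    # Patterns within one part combine as a union; successive filters
    # intersect, since each pass narrows the surviving name list.
    def matches(name, patterns):
      return any(fnmatch.fnmatch(name, p) for p in patterns.split(':') if p)
    for f in filters:
      positive, _, negative = f.partition('-')
      if positive:
        names = [n for n in names if matches(n, positive)]
      if negative:
        names = [n for n in names if not matches(n, negative)]
    return names

  # apply_gtest_filters(['A.t1', 'A.t2', 'B.t1'], ['A.*', '*-A.t2'])
  # -> ['A.t1']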
@@ -7,7 +7,7 @@ # pylint: disable=protected-access -from __future__ import absolute_import + import collections import tempfile import unittest @@ -61,36 +61,36 @@ def test_initializeFlagAttributes_commandLineFlags(self): o = self.createTestInstance() args = self.createFlagAttributesArgs(command_line_flags=['--foo', '--bar']) o._initializeFlagAttributes(args) - self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar']) + self.assertEqual(o._flags, ['--enable-test-intents', '--foo', '--bar']) def test_initializeFlagAttributes_deviceFlagsFile(self): o = self.createTestInstance() - with tempfile.NamedTemporaryFile() as flags_file: + with tempfile.NamedTemporaryFile(mode='w') as flags_file: flags_file.write('\n'.join(['--foo', '--bar'])) flags_file.flush() args = self.createFlagAttributesArgs(device_flags_file=flags_file.name) o._initializeFlagAttributes(args) - self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar']) + self.assertEqual(o._flags, ['--enable-test-intents', '--foo', '--bar']) def test_initializeFlagAttributes_strictModeOn(self): o = self.createTestInstance() args = self.createFlagAttributesArgs(strict_mode='on') o._initializeFlagAttributes(args) - self.assertEquals(o._flags, ['--enable-test-intents', '--strict-mode=on']) + self.assertEqual(o._flags, ['--enable-test-intents', '--strict-mode=on']) def test_initializeFlagAttributes_strictModeOn_coverageOn(self): o = self.createTestInstance() args = self.createFlagAttributesArgs( strict_mode='on', coverage_dir='/coverage/dir') o._initializeFlagAttributes(args) - self.assertEquals(o._flags, ['--enable-test-intents']) + self.assertEqual(o._flags, ['--enable-test-intents']) def test_initializeFlagAttributes_strictModeOff(self): o = self.createTestInstance() args = self.createFlagAttributesArgs(strict_mode='off') o._initializeFlagAttributes(args) - self.assertEquals(o._flags, ['--enable-test-intents']) + self.assertEqual(o._flags, ['--enable-test-intents']) def testGetTests_noFilter(self): o = self.createTestInstance() @@ -153,11 +153,10 @@ def testGetTests_noFilter(self): }, ] - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testGetTests_simpleGtestFilter(self): o = self.createTestInstance() @@ -191,12 +190,146 @@ def testGetTests_simpleGtestFilter(self): }, ] - o._test_filter = 'org.chromium.test.SampleTest.testMethod1' - o._test_jar = 'path/to/test.jar' + o._test_filters = ['org.chromium.test.SampleTest.testMethod1'] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_simpleGtestPositiveAndNegativeFilter(self): + o = self.createTestInstance() + raw_tests = [{ + 'annotations': { + 'Feature': { + 'value': ['Foo'] + } + }, + 'class': + 'org.chromium.test.SampleTest', + 'superclass': + 'java.lang.Object', + 'methods': [ + { + 'annotations': { + 'SmallTest': None + }, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'MediumTest': None + }, + 'method': 'testMethod2', + }, + ], + }, { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + } + }, + 'class': + 'org.chromium.test.SampleTest2', + 'superclass': + 'java.lang.Object', + 'methods': [{ + 'annotations': { + 'SmallTest': None + }, + 'method': 'testMethod1', + }], + }] + + expected_tests = [ + { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + }, + 'SmallTest': None, + }, + 
'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._test_filters = [ + 'org.chromium.test.SampleTest.*'\ + '-org.chromium.test.SampleTest.testMethod2' + ] o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_multipleGtestPositiveAndNegativeFilter(self): + o = self.createTestInstance() + raw_tests = [{ + 'annotations': { + 'Feature': { + 'value': ['Foo'] + } + }, + 'class': + 'org.chromium.test.SampleTest', + 'superclass': + 'java.lang.Object', + 'methods': [ + { + 'annotations': { + 'SmallTest': None + }, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'MediumTest': None + }, + 'method': 'testMethod2', + }, + ], + }, { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + } + }, + 'class': + 'org.chromium.test.SampleTest2', + 'superclass': + 'java.lang.Object', + 'methods': [{ + 'annotations': { + 'SmallTest': None + }, + 'method': 'testMethod1', + }], + }] + + expected_tests = [ + { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + }, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._test_filters = [ + 'org.chromium.test.SampleTest*testMethod1', + 'org.chromium.test.SampleTest.*'\ + '-org.chromium.test.SampleTest.testMethod2' + ] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) def testGetTests_simpleGtestUnqualifiedNameFilter(self): o = self.createTestInstance() @@ -230,12 +363,11 @@ def testGetTests_simpleGtestUnqualifiedNameFilter(self): }, ] - o._test_filter = 'SampleTest.testMethod1' - o._test_jar = 'path/to/test.jar' + o._test_filters = ['SampleTest.testMethod1'] o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testGetTests_parameterizedTestGtestFilter(self): o = self.createTestInstance() @@ -289,12 +421,11 @@ def testGetTests_parameterizedTestGtestFilter(self): }, ] - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' - o._test_filter = 'org.chromium.test.SampleTest.testMethod1' + o._test_filters = ['org.chromium.test.SampleTest.testMethod1'] actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testGetTests_wildcardGtestFilter(self): o = self.createTestInstance() @@ -339,12 +470,11 @@ def testGetTests_wildcardGtestFilter(self): }, ] - o._test_filter = 'org.chromium.test.SampleTest2.*' - o._test_jar = 'path/to/test.jar' + o._test_filters = ['org.chromium.test.SampleTest2.*'] o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testGetTests_negativeGtestFilter(self): o = self.createTestInstance() @@ -398,12 +528,11 @@ def testGetTests_negativeGtestFilter(self): }, ] - o._test_filter = '*-org.chromium.test.SampleTest.testMethod1' - o._test_jar = 'path/to/test.jar' + o._test_filters = ['*-org.chromium.test.SampleTest.testMethod1'] o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def 
testGetTests_annotationFilter(self): o = self.createTestInstance() @@ -458,11 +587,10 @@ def testGetTests_annotationFilter(self): ] o._annotations = [('SmallTest', None)] - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testGetTests_excludedAnnotationFilter(self): o = self.createTestInstance() @@ -510,11 +638,103 @@ def testGetTests_excludedAnnotationFilter(self): ] o._excluded_annotations = [('SmallTest', None)] - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_excludedDoNotReviveAnnotation(self): + o = self.createTestInstance() + raw_tests = [{ + 'annotations': { + 'Feature': { + 'value': ['Foo'] + } + }, + 'class': + 'org.chromium.test.SampleTest', + 'superclass': + 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': { + 'DisabledTest': None, + 'DoNotRevive': { + 'reason': 'sample reason' + }, + }, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'FlakyTest': None, + }, + 'method': 'testMethod2', + }, + ], + }, { + 'annotations': { + 'Feature': { + 'value': ['Bar'] + } + }, + 'class': + 'org.chromium.test.SampleTest2', + 'superclass': + 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': { + 'FlakyTest': None, + 'DoNotRevive': { + 'reason': 'sample reason' + }, + }, + 'method': 'testMethod1', + }, + ], + }, { + 'annotations': { + 'Feature': { + 'value': ['Baz'] + } + }, + 'class': + 'org.chromium.test.SampleTest3', + 'superclass': + 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': { + 'FlakyTest': None, + 'Manual': { + 'message': 'sample message' + }, + }, + 'method': 'testMethod1', + }, + ], + }] + + expected_tests = [ + { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + }, + 'FlakyTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod2', + }, + ] + + o._excluded_annotations = [('DoNotRevive', None), ('Manual', None)] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) def testGetTests_annotationSimpleValueFilter(self): o = self.createTestInstance() @@ -572,11 +792,10 @@ def testGetTests_annotationSimpleValueFilter(self): ] o._annotations = [('TestValue', '1')] - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testGetTests_annotationDictValueFilter(self): o = self.createTestInstance() @@ -622,11 +841,10 @@ def testGetTests_annotationDictValueFilter(self): ] o._annotations = [('Feature', 'Bar')] - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testGetTestName(self): test = { @@ -644,13 +862,11 @@ def testGetTestName(self): 'method': test['method'] } - self.assertEquals( - instrumentation_test_instance.GetTestName(test, sep='.'), - 'org.chromium.TestA.testSimple') - self.assertEquals( - instrumentation_test_instance.GetTestName( - unqualified_class_test, sep='.'), - 'TestA.testSimple') + 
self.assertEqual(instrumentation_test_instance.GetTestName(test, sep='.'), + 'org.chromium.TestA.testSimple') + self.assertEqual( + instrumentation_test_instance.GetTestName(unqualified_class_test, + sep='.'), 'TestA.testSimple') def testGetUniqueTestName(self): test = { @@ -663,9 +879,8 @@ def testGetUniqueTestName(self): 'flags': ['enable_features=abc'], 'is_junit4': True, 'method': 'testSimple'} - self.assertEquals( - instrumentation_test_instance.GetUniqueTestName( - test, sep='.'), + self.assertEqual( + instrumentation_test_instance.GetUniqueTestName(test, sep='.'), 'org.chromium.TestA.testSimple_with_enable_features=abc') def testGetTestNameWithoutParameterPostfix(self): @@ -683,14 +898,12 @@ def testGetTestNameWithoutParameterPostfix(self): 'class': test['class'].split('.')[-1], 'method': test['method'] } - self.assertEquals( + self.assertEqual( instrumentation_test_instance.GetTestNameWithoutParameterPostfix( - test, sep='.'), - 'org.chromium.TestA') - self.assertEquals( + test, sep='.'), 'org.chromium.TestA') + self.assertEqual( instrumentation_test_instance.GetTestNameWithoutParameterPostfix( - unqualified_class_test, sep='.'), - 'TestA') + unqualified_class_test, sep='.'), 'TestA') def testGetTests_multipleAnnotationValuesRequested(self): o = self.createTestInstance() @@ -752,11 +965,10 @@ def testGetTests_multipleAnnotationValuesRequested(self): ] o._annotations = [('Feature', 'Bar'), ('Feature', 'Baz')] - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testGenerateTestResults_noStatus(self): results = instrumentation_test_instance.GenerateTestResults( @@ -950,10 +1162,9 @@ def testParameterizedCommandLineFlagsSwitches(self): expected_tests[i]['annotations'].update( raw_tests[0]['methods'][i]['annotations']) - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testParameterizedCommandLineFlags(self): o = self.createTestInstance() @@ -1073,10 +1284,9 @@ def testParameterizedCommandLineFlags(self): expected_tests[4]['annotations'].update( raw_tests[0]['methods'][0]['annotations']) - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testDifferentCommandLineParameterizations(self): o = self.createTestInstance() @@ -1134,10 +1344,9 @@ def testDifferentCommandLineParameterizations(self): expected_tests[i]['annotations'].update( raw_tests[0]['methods'][i]['annotations']) - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' actual_tests = o.ProcessRawTests(raw_tests) - self.assertEquals(actual_tests, expected_tests) + self.assertEqual(actual_tests, expected_tests) def testMultipleCommandLineParameterizations_raises(self): o = self.createTestInstance() @@ -1178,7 +1387,6 @@ def testMultipleCommandLineParameterizations_raises(self): }, ] - o._test_jar = 'path/to/test.jar' o._junit4_runner_class = 'J4Runner' self.assertRaises( instrumentation_test_instance.CommandLineParameterizationException, diff --git a/build/android/pylib/instrumentation/json_perf_parser.py b/build/android/pylib/instrumentation/json_perf_parser.py index 695407458b6c..ef541f49a114 100644 --- 
a/build/android/pylib/instrumentation/json_perf_parser.py +++ b/build/android/pylib/instrumentation/json_perf_parser.py @@ -1,11 +1,11 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """A helper module for parsing JSON objects from perf tests results.""" -from __future__ import absolute_import + import json diff --git a/build/android/pylib/instrumentation/test_result.py b/build/android/pylib/instrumentation/test_result.py index ed1e44cbced2..54aee3d62b32 100644 --- a/build/android/pylib/instrumentation/test_result.py +++ b/build/android/pylib/instrumentation/test_result.py @@ -1,8 +1,7 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import from pylib.base import base_test_result @@ -18,8 +17,7 @@ def __init__(self, full_name, test_type, dur, log=''): dur: Duration of the test run in milliseconds. log: A string listing any errors. """ - super(InstrumentationTestResult, self).__init__( - full_name, test_type, dur, log) + super().__init__(full_name, test_type, dur, log) name_pieces = full_name.rsplit('#') if len(name_pieces) > 1: self._test_name = name_pieces[1] diff --git a/build/android/pylib/junit/__init__.py b/build/android/pylib/junit/__init__.py index 4d6aabb953d6..d46d7b496679 100644 --- a/build/android/pylib/junit/__init__.py +++ b/build/android/pylib/junit/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/junit/junit_test_instance.py b/build/android/pylib/junit/junit_test_instance.py index b6bf8ae27235..58d8c31761c8 100644 --- a/build/android/pylib/junit/junit_test_instance.py +++ b/build/android/pylib/junit/junit_test_instance.py @@ -1,8 +1,7 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import absolute_import from pylib.base import test_instance from pylib.utils import test_filter @@ -10,17 +9,20 @@ class JunitTestInstance(test_instance.TestInstance): def __init__(self, args, _): - super(JunitTestInstance, self).__init__() + super().__init__() self._coverage_dir = args.coverage_dir self._debug_socket = args.debug_socket self._coverage_on_the_fly = args.coverage_on_the_fly + self._native_libs_dir = args.native_libs_dir self._package_filter = args.package_filter self._resource_apk = args.resource_apk self._robolectric_runtime_deps_dir = args.robolectric_runtime_deps_dir self._runner_filter = args.runner_filter self._shards = args.shards - self._test_filter = test_filter.InitializeFilterFromArgs(args) + self._test_filters = test_filter.InitializeFiltersFromArgs(args) + self._has_literal_filters = (args.isolated_script_test_filters + or args.test_filters) self._test_suite = args.test_suite #override @@ -47,6 +49,10 @@ def coverage_on_the_fly(self): def debug_socket(self): return self._debug_socket + @property + def native_libs_dir(self): + return self._native_libs_dir + @property def package_filter(self): return self._package_filter @@ -64,8 +70,12 @@ def runner_filter(self): return self._runner_filter @property - def test_filter(self): - return self._test_filter + def test_filters(self): + return self._test_filters + + @property + def has_literal_filters(self): + return self._has_literal_filters @property def shards(self): diff --git a/build/android/pylib/local/__init__.py b/build/android/pylib/local/__init__.py index 4d6aabb953d6..d46d7b496679 100644 --- a/build/android/pylib/local/__init__.py +++ b/build/android/pylib/local/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/local/device/__init__.py b/build/android/pylib/local/device/__init__.py index 4d6aabb953d6..d46d7b496679 100644 --- a/build/android/pylib/local/device/__init__.py +++ b/build/android/pylib/local/device/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/local/device/local_device_environment.py b/build/android/pylib/local/device/local_device_environment.py index d2a9077a0a56..a51f370b2b1a 100644 --- a/build/android/pylib/local/device/local_device_environment.py +++ b/build/android/pylib/local/device/local_device_environment.py @@ -1,8 +1,8 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import + import datetime import functools import logging @@ -34,6 +34,8 @@ 'StrictMode:D', ] +SYSTEM_USER_ID = 0 + def _DeviceCachePath(device): file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial() @@ -84,7 +86,7 @@ def wrapper(dev, *args, **kwargs): return decorator -def place_nomedia_on_device(dev, device_root): +def place_nomedia_on_device(dev, device_root, run_as=None, as_root=False): """Places .nomedia file in test data root. This helps to prevent system from scanning media files inside test data. 
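  Example (hypothetical caller; run_as/as_root are the parameters added here):
    place_nomedia_on_device(dev, '/sdcard/chromium_tests_root',
                            run_as='org.chromium.test', as_root=False)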
@@ -94,10 +96,19 @@ def place_nomedia_on_device(dev, device_root):
     device_root: Base path on device to place .nomedia file.
   """
-  dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
-  dev.WriteFile('%s/.nomedia' % device_root, 'https://crbug.com/796640')
+  dev.RunShellCommand(['mkdir', '-p', device_root],
+                      run_as=run_as,
+                      as_root=as_root,
+                      check_return=True)
+  dev.WriteFile('%s/.nomedia' % device_root,
+                'https://crbug.com/796640',
+                run_as=run_as,
+                as_root=as_root)
+# TODO(1262303): Once Telemetry supports Python 3, we can re-add
+# super() without arguments in this script.
+# pylint: disable=super-with-arguments
 class LocalDeviceEnvironment(environment.Environment):
 
   def __init__(self, args, output_manager, _error_func):
@@ -124,6 +135,8 @@ def __init__(self, args, output_manager, _error_func):
     self._trace_all = None
     if hasattr(args, 'trace_all'):
       self._trace_all = args.trace_all
+    self._use_persistent_shell = args.use_persistent_shell
+    self._disable_test_server = args.disable_test_server
 
     devil_chromium.Initialize(
         output_directory=constants.GetOutDirectory(),
@@ -163,7 +176,8 @@ def _InitDevices(self):
         enable_device_files_cache=self._enable_device_cache,
         default_retries=self._max_tries - 1,
         device_arg=device_arg,
-        abis=self._preferred_abis)
+        abis=self._preferred_abis,
+        persistent_shell=self._use_persistent_shell)
 
     if self._logcat_output_file:
       self._logcat_output_dir = tempfile.mkdtemp()
@@ -171,6 +185,12 @@ def _InitDevices(self):
     @handle_shard_failures_with(on_failure=self.DenylistDevice)
     def prepare_device(d):
       d.WaitUntilFullyBooted()
+      if d.GetCurrentUser() != SYSTEM_USER_ID:
+        # Use the system user to run tasks to avoid "/sdcard" access issues
+        # due to multiple users. For details, see
+        # https://source.android.com/docs/devices/admin/multi-user-testing
+        logging.info('Switching to user with id %s', SYSTEM_USER_ID)
+        d.SwitchUser(SYSTEM_USER_ID)
 
       if self._enable_device_cache:
         cache_path = _DeviceCachePath(d)
@@ -186,8 +206,10 @@ def prepare_device(d):
             self._logcat_output_dir,
             '%s_%s' % (d.adb.GetDeviceSerial(),
                        datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%S')))
-        monitor = logcat_monitor.LogcatMonitor(
-            d.adb, clear=True, output_file=logcat_file)
+        monitor = logcat_monitor.LogcatMonitor(d.adb,
+                                               clear=True,
+                                               output_file=logcat_file,
+                                               check_error=False)
         self._logcat_monitors.append(monitor)
         monitor.Start()
 
@@ -243,6 +265,10 @@ def tool(self):
   def trace_output(self):
     return self._trace_output
 
+  @property
+  def disable_test_server(self):
+    return self._disable_test_server
+
   #override
   def TearDown(self):
     if self.trace_output and self._trace_all:
diff --git a/build/android/pylib/local/device/local_device_gtest_run.py b/build/android/pylib/local/device/local_device_gtest_run.py
index 753556d44e82..796f614d78c8 100644
--- a/build/android/pylib/local/device/local_device_gtest_run.py
+++ b/build/android/pylib/local/device/local_device_gtest_run.py
@@ -1,10 +1,11 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from __future__ import absolute_import
+
 import contextlib
 import collections
+import fnmatch
 import itertools
 import logging
 import math
@@ -54,6 +55,9 @@
     'org.chromium.native_test.NativeTestInstrumentationTestRunner'
     '.TestList')
 
+# Used to identify the PRE_ test prefix in gtests.
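[Editor's aside on the prepare_device() hunk above: the user switch guards against /sdcard access failures on multi-user devices. A minimal standalone sketch, assuming `device` is a devil DeviceUtils instance as in the diff (GetCurrentUser/SwitchUser are the calls the hunk itself relies on):

    import logging

    SYSTEM_USER_ID = 0  # Android's primary (system) user.

    def ensure_system_user(device):
      # Run everything as user 0 so /sdcard paths resolve consistently.
      if device.GetCurrentUser() != SYSTEM_USER_ID:
        logging.info('Switching to user with id %s', SYSTEM_USER_ID)
        device.SwitchUser(SYSTEM_USER_ID)
]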
+_GTEST_PRETEST_PREFIX = 'PRE_' + _SECONDS_TO_NANOS = int(1e9) # Tests that use SpawnedTestServer must run the LocalTestServerSpawner on the @@ -76,7 +80,7 @@ # No-op context manager. If we used Python 3, we could change this to # contextlib.ExitStack() -class _NullContextManager(object): +class _NullContextManager: def __enter__(self): pass def __exit__(self, *args): @@ -91,29 +95,39 @@ def _GenerateSequentialFileNames(filename): yield '%s_%d%s' % (base, i, ext) -def _ExtractTestsFromFilter(gtest_filter): - """Returns the list of tests specified by the given filter. +def _ExtractTestsFromFilters(gtest_filters): + """Returns the list of tests specified by the given filters. Returns: None if the device should be queried for the test list instead. """ - # Empty means all tests, - means exclude filter. - if not gtest_filter or '-' in gtest_filter: + # - means exclude filter. + for gtest_filter in gtest_filters: + if '-' in gtest_filter: + return None + # Empty means all tests + if not any(gtest_filters): return None - patterns = gtest_filter.split(':') - # For a single pattern, allow it even if it has a wildcard so long as the - # wildcard comes at the end and there is at least one . to prove the scope is - # not too large. - # This heuristic is not necessarily faster, but normally is. - if len(patterns) == 1 and patterns[0].endswith('*'): - no_suffix = patterns[0].rstrip('*') - if '*' not in no_suffix and '.' in no_suffix: - return patterns - - if '*' in gtest_filter: - return None - return patterns + if len(gtest_filters) == 1: + patterns = gtest_filters[0].split(':') + # For a single pattern, allow it even if it has a wildcard so long as the + # wildcard comes at the end and there is at least one . to prove the scope + # is not too large. + # This heuristic is not necessarily faster, but normally is. + if len(patterns) == 1 and patterns[0].endswith('*'): + no_suffix = patterns[0].rstrip('*') + if '*' not in no_suffix and '.' 
in no_suffix: + return patterns + + all_patterns = set(gtest_filters[0].split(':')) + for gtest_filter in gtest_filters: + patterns = gtest_filter.split(':') + for pattern in patterns: + if '*' in pattern: + return None + all_patterns = all_patterns.intersection(set(patterns)) + return list(all_patterns) def _GetDeviceTimeoutMultiplier(): @@ -232,7 +246,7 @@ def _GetLLVMProfilePath(device_coverage_dir, suite, coverage_index): str(coverage_index), '%2m.profraw'])) -class _ApkDelegate(object): +class _ApkDelegate: def __init__(self, test_instance, tool): self._activity = test_instance.activity self._apk_helper = test_instance.apk_helper @@ -248,6 +262,7 @@ def __init__(self, test_instance, tool): self._tool = tool self._coverage_dir = test_instance.coverage_dir self._coverage_index = 0 + self._use_existing_test_data = test_instance.use_existing_test_data def GetTestDataRoot(self, device): # pylint: disable=no-self-use @@ -255,6 +270,8 @@ def GetTestDataRoot(self, device): 'chromium_tests_root') def Install(self, device): + if self._use_existing_test_data: + return if self._test_apk_incremental_install_json: installer.Install(device, self._test_apk_incremental_install_json, apk=self._apk_helper, permissions=self._permissions) @@ -265,8 +282,8 @@ def Install(self, device): reinstall=True, permissions=self._permissions) - def ResultsDirectory(self, device): - return device.GetApplicationDataDirectory(self._package) + def ResultsDirectory(self, device): # pylint: disable=no-self-use + return device.GetExternalStoragePath() def Run(self, test, device, flags=None, **kwargs): extras = dict(self._extras) @@ -287,7 +304,6 @@ def Run(self, test, device, flags=None, **kwargs): extras[gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT] = int( kwargs['timeout'] * _SECONDS_TO_NANOS) - # pylint: disable=redefined-variable-type command_line_file = _NullContextManager() if flags: if len(flags) > _MAX_INLINE_FLAGS_LENGTH: @@ -305,7 +321,6 @@ def Run(self, test, device, flags=None, **kwargs): extras[_EXTRA_TEST_LIST] = test_list_file.name else: extras[_EXTRA_TEST] = test[0] - # pylint: enable=redefined-variable-type # We need to use GetAppWritablePath here instead of GetExternalStoragePath # since we will not have yet applied legacy storage permission workarounds @@ -362,7 +377,7 @@ def Clear(self, device): device.ClearApplicationState(self._package, permissions=self._permissions) -class _ExeDelegate(object): +class _ExeDelegate: def __init__(self, tr, test_instance, tool): self._host_dist_dir = test_instance.exe_dist_dir @@ -456,14 +471,13 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun): def __init__(self, env, test_instance): assert isinstance(env, local_device_environment.LocalDeviceEnvironment) assert isinstance(test_instance, gtest_test_instance.GtestTestInstance) - super(LocalDeviceGtestRun, self).__init__(env, test_instance) + super().__init__(env, test_instance) if self._test_instance.apk_helper: self._installed_packages = [ self._test_instance.apk_helper.GetPackageName() ] - # pylint: disable=redefined-variable-type if self._test_instance.apk: self._delegate = _ApkDelegate(self._test_instance, env.tool) elif self._test_instance.exe_dist_dir: @@ -473,7 +487,6 @@ def __init__(self, env, test_instance): self._test_instance.isolated_script_test_perf_output) else: self._test_perf_output_filenames = itertools.repeat(None) - # pylint: enable=redefined-variable-type self._crashes = set() self._servers = collections.defaultdict(list) @@ -492,6 +505,8 @@ def install_apk(dev): 
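[Editor's aside: _ExtractTestsFromFilters above intersects the positive patterns of each --gtest_filter and bails out to device queries on exclusions or wildcards. A simplified sketch of those semantics, dropping the single-filter trailing-wildcard heuristic:

    def extract_tests(gtest_filters):
      # Any exclusion ('-') or an entirely empty filter list means:
      # fall back to querying the device for the test list.
      if not any(gtest_filters) or any('-' in f for f in gtest_filters):
        return None
      patterns = set(gtest_filters[0].split(':'))
      for f in gtest_filters[1:]:
        patterns &= set(f.split(':'))
      if any('*' in p for p in patterns):
        return None
      return sorted(patterns)

    # extract_tests(['A.b:A.c:A.d', 'A.c:A.d']) -> ['A.c', 'A.d']
]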
self._delegate.Install(dev) def push_test_data(dev): + if self._test_instance.use_existing_test_data: + return # Push data dependencies. device_root = self._delegate.GetTestDataRoot(dev) host_device_tuples_substituted = [ @@ -514,13 +529,17 @@ def init_tool_and_start_servers(dev): tool.CopyFiles(dev) tool.SetupEnvironment() + if self._env.disable_test_server: + logging.warning('Not starting test server. Some tests may fail.') + return + try: # See https://crbug.com/1030827. # This is a hack that may break in the future. We're relying on the # fact that adb doesn't use ipv6 for it's server, and so doesn't # listen on ipv6, but ssh remote forwarding does. 5037 is the port # number adb uses for its server. - if "[::1]:5037" in subprocess.check_output( + if b"[::1]:5037" in subprocess.check_output( "ss -o state listening 'sport = 5037'", shell=True): logging.error( 'Test Server cannot be started with a remote-forwarded adb ' @@ -541,15 +560,6 @@ def init_tool_and_start_servers(dev): def bind_crash_handler(step, dev): return lambda: crash_handler.RetryOnSystemCrash(step, dev) - # Explicitly enable root to ensure that tests run under deterministic - # conditions. Without this explicit call, EnableRoot() is called from - # push_test_data() when PushChangedFiles() determines that it should use - # _PushChangedFilesZipped(), which is only most of the time. - # Root is required (amongst maybe other reasons) to pull the results file - # from the device, since it lives within the application's data directory - # (via GetApplicationDataDirectory()). - device.EnableRoot() - steps = [ bind_crash_handler(s, device) for s in (install_apk, push_test_data, init_tool_and_start_servers)] @@ -564,11 +574,25 @@ def bind_crash_handler(step, dev): self._test_instance.GetDataDependencies()) #override - def _ShouldShard(self): + def _ShouldShardTestsForDevices(self): + """Shard tests across several devices. + + Returns: + True if tests should be sharded across several devices, + False otherwise. + """ return True #override - def _CreateShards(self, tests): + def _CreateShardsForDevices(self, tests): + """Create shards of tests to run on devices. + + Args: + tests: List containing tests or test batches. + + Returns: + List of test batches. + """ # _crashes are tests that might crash and make the tests in the same shard # following the crashed testcase not run. # Thus we need to create separate shards for each crashed testcase, @@ -582,6 +606,10 @@ def _CreateShards(self, tests): # Delete suspect testcase from tests. tests = [test for test in tests if not test in self._crashes] + # Sort tests by hash. + # TODO(crbug.com/1257820): Add sorting logic back to _PartitionTests. + tests = self._SortTests(tests) + max_shard_size = self._test_instance.test_launcher_batch_limit shards.extend(self._PartitionTests(tests, device_count, max_shard_size)) @@ -593,7 +621,7 @@ def _GetTests(self): # When the exact list of tests to run is given via command-line (e.g. when # locally iterating on a specific test), skip querying the device (which # takes ~3 seconds). 
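[Editor's aside: the b"[::1]:5037" change above is a Python 3 fix. subprocess.check_output returns bytes unless text=True is passed, so the needle used to detect an ssh-forwarded adb server port must also be bytes:

    import subprocess

    out = subprocess.check_output("ss -o state listening 'sport = 5037'",
                                  shell=True)
    remote_forwarded = b'[::1]:5037' in out  # bytes needle, bytes haystack
]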
-    tests = _ExtractTestsFromFilter(self._test_instance.gtest_filter)
+    tests = _ExtractTestsFromFilters(self._test_instance.gtest_filters)
     if tests:
       return tests
 
@@ -609,8 +637,10 @@ def list_tests(dev):
         timeout = None
 
       flags = [
-          f for f in self._test_instance.flags
-          if f not in ['--wait-for-debugger', '--wait-for-java-debugger']
+          f for f in self._test_instance.flags if f not in [
+              '--wait-for-debugger', '--wait-for-java-debugger',
+              '--gtest_also_run_disabled_tests'
+          ]
       ]
       flags.append('--gtest_list_tests')
 
@@ -652,6 +682,42 @@ def list_tests(dev):
           self._test_instance.total_external_shards)
     return tests
 
+  #override
+  def _GroupTests(self, tests):
+    pre_tests = dict()
+    other_tests = []
+    for test in tests:
+      test_name_start = max(test.find('.') + 1, 0)
+      test_name = test[test_name_start:]
+      if test_name_start == 0 or not test_name.startswith(
+          _GTEST_PRETEST_PREFIX):
+        other_tests.append(test)
+      else:
+        test_suite = test[:test_name_start - 1]
+        trim_test = test
+        trim_tests = [test]
+
+        while test_name.startswith(_GTEST_PRETEST_PREFIX):
+          test_name = test_name[len(_GTEST_PRETEST_PREFIX):]
+          trim_test = '%s.%s' % (test_suite, test_name)
+          trim_tests.append(trim_test)
+
+        if not trim_test in pre_tests or len(
+            pre_tests[trim_test]) < len(trim_tests):
+          pre_tests[trim_test] = trim_tests
+
+    all_tests = []
+    for other_test in other_tests:
+      if not other_test in pre_tests:
+        all_tests.append(other_test)
+
+    # TODO(crbug.com/1257820): Add logic to support grouping tests.
+    # Once grouping logic is added, switch to 'append' from 'extend'.
+    for _, test_list in pre_tests.items():
+      all_tests.extend(test_list)
+
+    return all_tests
+
   def _UploadTestArtifacts(self, device, test_artifacts_dir):
     # TODO(jbudorick): Reconcile this with the output manager once
     # https://codereview.chromium.org/2933993002/ lands.
@@ -718,7 +784,7 @@ def _ArchiveLogcat(self, device, test):
       if logmon:
         logmon.Close()
       if logcat_file and logcat_file.Link():
-        logging.info('Logcat saved to %s', logcat_file.Link())
+        logging.critical('Logcat saved to %s', logcat_file.Link())
 
   #override
   def _RunTest(self, device, test):
@@ -867,6 +933,19 @@ def _RunTest(self, device, test):
           gtest_test_instance.TestNameWithoutDisabledPrefix(t))
     not_run_tests = tests_stripped_disabled_prefix.difference(
         set(r.GetName() for r in results))
+
+    if self._test_instance.extract_test_list_from_filter:
+      # A test string might end with a * in this mode, and so may not match
+      # any r.GetName() for the set difference. It is possible for a filter
+      # like foo.* to match two tests, e.g. foo.baz and foo.foo. At run time,
+      # foo.baz may have run while foo.foo did not, but the test list foo.*
+      # will not be rerun since at least one result matched it.
+      not_run_tests = {
+          t
+          for t in not_run_tests
+          if not any(fnmatch.fnmatch(r.GetName(), t) for r in results)
+      }
+
     return results, list(not_run_tests) if results else None
 
   #override
diff --git a/build/android/pylib/local/device/local_device_gtest_run_test.py b/build/android/pylib/local/device/local_device_gtest_run_test.py
index b08b24b4fa51..5a485c6b31b2 100755
--- a/build/android/pylib/local/device/local_device_gtest_run_test.py
+++ b/build/android/pylib/local/device/local_device_gtest_run_test.py
@@ -1,12 +1,12 @@
-#!/usr/bin/env vpython
-# Copyright 2021 The Chromium Authors. All rights reserved.
+#!/usr/bin/env vpython3
+# Copyright 2021 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
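[Editor's aside: with extract_test_list_from_filter, an entry in not_run_tests may itself be a wildcard, so the retry set is pruned with fnmatch as in the hunk above. Schematically:

    import fnmatch

    def prune_not_run(not_run_tests, result_names):
      # Drop any wildcard entry that at least one finished result satisfies.
      return {t for t in not_run_tests
              if not any(fnmatch.fnmatch(n, t) for n in result_names)}

    # prune_not_run({'foo.*', 'bar.baz'}, ['foo.foo']) -> {'bar.baz'}
]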
"""Tests for local_device_gtest_test_run.""" # pylint: disable=protected-access -from __future__ import absolute_import + import os import tempfile import unittest @@ -19,6 +19,11 @@ import mock # pylint: disable=import-error +def isSliceInList(s, l): + lenOfSlice = len(s) + return any(s == l[i:lenOfSlice + i] for i in range(len(l) - lenOfSlice + 1)) + + class LocalDeviceGtestRunTest(unittest.TestCase): def setUp(self): self._obj = local_device_gtest_run.LocalDeviceGtestRun( @@ -27,21 +32,27 @@ def setUp(self): def testExtractTestsFromFilter(self): # Checks splitting by colons. - self.assertEqual([ - 'b17', - 'm4e3', - 'p51', - ], local_device_gtest_run._ExtractTestsFromFilter('b17:m4e3:p51')) + self.assertEqual( + set([ + 'm4e3', + 'p51', + 'b17', + ]), + set(local_device_gtest_run._ExtractTestsFromFilters(['b17:m4e3:p51']))) # Checks the '-' sign. - self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilter('-mk2')) + self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilters(['-mk2'])) # Checks the more than one asterick. self.assertIsNone( - local_device_gtest_run._ExtractTestsFromFilter('.mk2*:.M67*')) + local_device_gtest_run._ExtractTestsFromFilters(['.mk2*:.M67*'])) # Checks just an asterick without a period - self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilter('M67*')) + self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilters(['M67*'])) # Checks an asterick at the end with a period. self.assertEqual(['.M67*'], - local_device_gtest_run._ExtractTestsFromFilter('.M67*')) + local_device_gtest_run._ExtractTestsFromFilters(['.M67*'])) + # Checks multiple filters intersect + self.assertEqual(['m4e3'], + local_device_gtest_run._ExtractTestsFromFilters( + ['b17:m4e3:p51', 'b17:m4e3', 'm4e3:p51'])) def testGetLLVMProfilePath(self): path = local_device_gtest_run._GetLLVMProfilePath('test_dir', 'sr71', '5') @@ -74,6 +85,34 @@ def testUploadTestArtifacts(self, mock_gsh): self.assertTrue(mock_gsh.called) self.assertEqual(result, link) + def testGroupTests(self): + test = [ + "TestClass1.testcase1", + "TestClass1.otherTestCase", + "TestClass1.PRE_testcase1", + "TestClass1.abc_testcase2", + "TestClass1.PRE_PRE_testcase1", + "TestClass1.PRE_abc_testcase2", + "TestClass1.PRE_PRE_abc_testcase2", + ] + expectedTestcase1 = [ + "TestClass1.PRE_PRE_testcase1", + "TestClass1.PRE_testcase1", + "TestClass1.testcase1", + ] + expectedTestcase2 = [ + "TestClass1.PRE_PRE_abc_testcase2", + "TestClass1.PRE_abc_testcase2", + "TestClass1.abc_testcase2", + ] + expectedOtherTestcase = [ + "TestClass1.otherTestCase", + ] + actualTestCase = self._obj._GroupTests(test) + self.assertTrue(isSliceInList(expectedTestcase1, actualTestCase)) + self.assertTrue(isSliceInList(expectedTestcase2, actualTestCase)) + self.assertTrue(isSliceInList(expectedOtherTestcase, actualTestCase)) + if __name__ == '__main__': unittest.main(verbosity=2) diff --git a/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/build/android/pylib/local/device/local_device_instrumentation_test_run.py index 6cb0c6081a3b..cda22a497efe 100644 --- a/build/android/pylib/local/device/local_device_instrumentation_test_run.py +++ b/build/android/pylib/local/device/local_device_instrumentation_test_run.py @@ -1,8 +1,7 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import absolute_import import collections import contextlib import copy @@ -26,6 +25,7 @@ from devil.android import device_temp_file from devil.android import flag_changer from devil.android.sdk import shared_prefs +from devil.android.sdk import version_codes from devil.android import logcat_monitor from devil.android.tools import system_app from devil.android.tools import webview_app @@ -98,24 +98,32 @@ _EXTRA_TEST_LIST = ( 'org.chromium.base.test.BaseChromiumAndroidJUnitRunner.TestList') +_EXTRA_TEST_IS_UNIT = ( + 'org.chromium.base.test.BaseChromiumAndroidJUnitRunner.IsUnitTest') + _EXTRA_PACKAGE_UNDER_TEST = ('org.chromium.chrome.test.pagecontroller.rules.' 'ChromeUiApplicationTestRule.PackageUnderTest') FEATURE_ANNOTATION = 'Feature' RENDER_TEST_FEATURE_ANNOTATION = 'RenderTest' WPR_ARCHIVE_FILE_PATH_ANNOTATION = 'WPRArchiveDirectory' +WPR_ARCHIVE_NAME_ANNOTATION = 'WPRArchiveDirectory$ArchiveName' WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION = 'WPRRecordReplayTest' _DEVICE_GOLD_DIR = 'skia_gold' # A map of Android product models to SDK ints. RENDER_TEST_MODEL_SDK_CONFIGS = { # Android x86 emulator. - 'Android SDK built for x86': [23], - 'Pixel 2': [28], + 'Android SDK built for x86': [23, 24], + # We would like this to be supported, but it is currently too prone to + # introducing flakiness due to a combination of Gold and Chromium issues. + # See crbug.com/1233700 and skbug.com/12149 for more information. + # 'Pixel 2': [28], } _BATCH_SUFFIX = '_batch' -_TEST_BATCH_MAX_GROUP_SIZE = 256 +# If the batch is too big it starts to fail for command line length reasons. +_LOCAL_TEST_BATCH_MAX_GROUP_SIZE = 200 @contextlib.contextmanager @@ -130,22 +138,38 @@ def _LogTestEndpoints(device, test_name): ['log', '-p', 'i', '-t', _TAG, 'END %s' % test_name], check_return=True) -# TODO(jbudorick): Make this private once the instrumentation test_runner -# is deprecated. -def DidPackageCrashOnDevice(package_name, device): + +@contextlib.contextmanager +def _VoiceInteractionService(device, use_voice_interaction_service): + def set_voice_interaction_service(service): + device.RunShellCommand( + ['settings', 'put', 'secure', 'voice_interaction_service', service]) + + default_voice_interaction_service = None + try: + default_voice_interaction_service = device.RunShellCommand( + ['settings', 'get', 'secure', 'voice_interaction_service'], + single_line=True) + + set_voice_interaction_service(use_voice_interaction_service) + yield + finally: + set_voice_interaction_service(default_voice_interaction_service) + + +def DismissCrashDialogs(device): # Dismiss any error dialogs. Limit the number in case we have an error # loop or we are failing to dismiss. 
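[Editor's aside: the new _VoiceInteractionService context manager above swaps the secure voice_interaction_service setting for the duration of the run and restores the previous value on exit. A usage sketch, where `device` is a devil DeviceUtils and both the assistant component name and run_tests() are hypothetical stand-ins:

    with _VoiceInteractionService(device, 'com.example/.FakeAssistant'):
      run_tests(device)  # hypothetical test-run call
    # On exit, the previous voice_interaction_service value is restored.
]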
+ packages = set() try: for _ in range(10): package = device.DismissCrashDialogIfNeeded(timeout=10, retries=1) if not package: - return False - # Assume test package convention of ".test" suffix - if package in package_name: - return True + break + packages.add(package) except device_errors.CommandFailedError: logging.exception('Error while attempting to dismiss crash dialog.') - return False + return packages _CURRENT_FOCUS_CRASH_RE = re.compile( @@ -161,8 +185,7 @@ def _GetTargetPackageName(test_apk): class LocalDeviceInstrumentationTestRun( local_device_test_run.LocalDeviceTestRun): def __init__(self, env, test_instance): - super(LocalDeviceInstrumentationTestRun, self).__init__( - env, test_instance) + super().__init__(env, test_instance) self._chrome_proxy = None self._context_managers = collections.defaultdict(list) self._flag_changers = {} @@ -170,14 +193,22 @@ def __init__(self, env, test_instance): self._shared_prefs_to_restore = [] self._skia_gold_session_manager = None self._skia_gold_work_dir = None + self._target_package = _GetTargetPackageName(test_instance.test_apk) #override def TestPackage(self): return self._test_instance.suite + def _GetDataStorageRootDirectory(self, device): + if self._test_instance.store_data_in_app_directory: + # TODO(rmhasan): Add check to makes sure api level > 27. Selinux + # policy on Oreo does not allow app to read files from app data dir + # that were not put there by the app. + return device.GetApplicationDataDirectory(self._target_package) + return device.GetExternalStoragePath() + #override def SetUp(self): - target_package = _GetTargetPackageName(self._test_instance.test_apk) @local_device_environment.handle_shard_failures_with( self._env.DenylistDevice) @@ -197,8 +228,7 @@ def replace_package(dev): # manually invoke its __enter__ and __exit__ methods in setup and # teardown. system_app_context = system_app.ReplaceSystemApp( - dev, self._test_instance.replace_system_package.package, - self._test_instance.replace_system_package.replacement_apk) + dev, self._test_instance.replace_system_package) # Pylint is not smart enough to realize that this field has # an __enter__ method, and will complain loudly. # pylint: disable=no-member @@ -223,43 +253,34 @@ def remove_packages(dev): # concurrent adb with this option specified, this should be safe. steps.insert(0, remove_packages) - if self._test_instance.use_webview_provider: - @trace_event.traced - def use_webview_provider(dev): - # We need the context manager to be applied before modifying any - # shared preference files in case the replacement APK needs to be - # set up, and it needs to be applied while the test is running. - # Thus, it needs to be applied early during setup, but must still be - # applied during _RunTest, which isn't possible using 'with' without - # applying the context manager up in test_runner. Instead, we - # manually invoke its __enter__ and __exit__ methods in setup and - # teardown. - webview_context = webview_app.UseWebViewProvider( - dev, self._test_instance.use_webview_provider) - # Pylint is not smart enough to realize that this field has - # an __enter__ method, and will complain loudly. 
- # pylint: disable=no-member - webview_context.__enter__() - # pylint: enable=no-member - self._context_managers[str(dev)].append(webview_context) - - steps.append(use_webview_provider) - def install_helper(apk, modules=None, fake_modules=None, permissions=None, - additional_locales=None): + additional_locales=None, + instant_app=False): + + @instrumentation_tracing.no_tracing + @trace_event.traced + def install_helper_internal(d, apk_path=None): + # pylint: disable=unused-argument + d.Install( + apk, + modules=modules, + fake_modules=fake_modules, + permissions=permissions, + additional_locales=additional_locales, + instant_app=instant_app, + force_queryable=self._test_instance.IsApkForceQueryable(apk)) + + return install_helper_internal + def install_apex_helper(apex): @instrumentation_tracing.no_tracing @trace_event.traced def install_helper_internal(d, apk_path=None): # pylint: disable=unused-argument - d.Install(apk, - modules=modules, - fake_modules=fake_modules, - permissions=permissions, - additional_locales=additional_locales) + d.InstallApex(apex) return install_helper_internal @@ -269,22 +290,33 @@ def incremental_install_helper(apk, json_path, permissions): def incremental_install_helper_internal(d, apk_path=None): # pylint: disable=unused-argument installer.Install(d, json_path, apk=apk, permissions=permissions) + return incremental_install_helper_internal + steps.extend( + install_apex_helper(apex) + for apex in self._test_instance.additional_apexs) + + steps.extend( + install_helper(apk, instant_app=self._test_instance.IsApkInstant(apk)) + for apk in self._test_instance.additional_apks) + permissions = self._test_instance.test_apk.GetPermissions() if self._test_instance.test_apk_incremental_install_json: - steps.append(incremental_install_helper( - self._test_instance.test_apk, - self._test_instance. - test_apk_incremental_install_json, - permissions)) + if self._test_instance.test_apk_as_instant: + raise Exception('Test APK cannot be installed as an instant ' + 'app if it is incremental') + + steps.append( + incremental_install_helper( + self._test_instance.test_apk, + self._test_instance.test_apk_incremental_install_json, + permissions)) else: steps.append( - install_helper( - self._test_instance.test_apk, permissions=permissions)) - - steps.extend( - install_helper(apk) for apk in self._test_instance.additional_apks) + install_helper(self._test_instance.test_apk, + permissions=permissions, + instant_app=self._test_instance.test_apk_as_instant)) # We'll potentially need the package names later for setting app # compatibility workarounds. @@ -292,6 +324,48 @@ def incremental_install_helper_internal(d, apk_path=None): [self._test_instance.test_apk]): self._installed_packages.append(apk_helper.GetPackageName(apk)) + if self._test_instance.use_webview_provider: + + @trace_event.traced + def use_webview_provider(dev): + # We need the context manager to be applied before modifying any + # shared preference files in case the replacement APK needs to be + # set up, and it needs to be applied while the test is running. + # Thus, it needs to be applied early during setup, but must still be + # applied during _RunTest, which isn't possible using 'with' without + # applying the context manager up in test_runner. Instead, we + # manually invoke its __enter__ and __exit__ methods in setup and + # teardown. 
+ # We do this after installing additional APKs so that + # we can install trichrome library before installing the webview + # provider + webview_context = webview_app.UseWebViewProvider( + dev, self._test_instance.use_webview_provider) + # Pylint is not smart enough to realize that this field has + # an __enter__ method, and will complain loudly. + # pylint: disable=no-member + webview_context.__enter__() + # pylint: enable=no-member + self._context_managers[str(dev)].append(webview_context) + + steps.append(use_webview_provider) + + if self._test_instance.use_voice_interaction_service: + + @trace_event.traced + def use_voice_interaction_service(device): + voice_interaction_service_context = _VoiceInteractionService( + device, self._test_instance.use_voice_interaction_service) + # Pylint is not smart enough to realize that this field has + # an __enter__ method, and will complain loudly. + # pylint: disable=no-member + voice_interaction_service_context.__enter__() + # pylint: enable=no-member + self._context_managers[str(device)].append( + voice_interaction_service_context) + + steps.append(use_voice_interaction_service) + # The apk under test needs to be installed last since installing other # apks after will unintentionally clear the fake module directory. # TODO(wnwen): Make this more robust, fix crbug.com/1010954. @@ -312,6 +386,17 @@ def incremental_install_helper_internal(d, apk_path=None): self._test_instance.fake_modules, permissions, self._test_instance.additional_locales)) + # Execute any custom setup shell commands + if self._test_instance.run_setup_commands: + + @trace_event.traced + def run_setup_commands(dev): + for cmd in self._test_instance.run_setup_commands: + logging.info('Running custom setup shell command: %s', cmd) + dev.RunShellCommand(cmd, shell=True, check_return=True) + + steps.append(run_setup_commands) + @trace_event.traced def set_debug_app(dev): # Set debug app in order to enable reading command line flags on user @@ -319,7 +404,7 @@ def set_debug_app(dev): cmd = ['am', 'set-debug-app', '--persistent'] if self._test_instance.wait_for_java_debugger: cmd.append('-w') - cmd.append(target_package) + cmd.append(self._target_package) dev.RunShellCommand(cmd, check_return=True) @trace_event.traced @@ -335,6 +420,10 @@ def edit_shared_prefs(dev): shared_preference_utils.ApplySharedPreferenceSetting( shared_pref, setting) + @trace_event.traced + def approve_app_links(dev): + self._ToggleAppLinks(dev, 'STATE_APPROVED') + @trace_event.traced def set_vega_permissions(dev): # Normally, installation of VrCore automatically grants storage @@ -350,20 +439,32 @@ def set_vega_permissions(dev): @instrumentation_tracing.no_tracing def push_test_data(dev): - device_root = posixpath.join(dev.GetExternalStoragePath(), - 'chromium_tests_root') + test_data_root_dir = posixpath.join( + self._GetDataStorageRootDirectory(dev), 'chromium_tests_root') host_device_tuples_substituted = [ - (h, local_device_test_run.SubstituteDeviceRoot(d, device_root)) - for h, d in host_device_tuples] + (h, + local_device_test_run.SubstituteDeviceRoot(d, test_data_root_dir)) + for h, d in host_device_tuples + ] logging.info('Pushing data dependencies.') for h, d in host_device_tuples_substituted: logging.debug(' %r -> %r', h, d) - local_device_environment.place_nomedia_on_device(dev, device_root) + + as_root = self._test_instance.store_data_in_app_directory + local_device_environment.place_nomedia_on_device(dev, + test_data_root_dir, + as_root=as_root) dev.PushChangedFiles(host_device_tuples_substituted, - 
delete_device_stale=True) + delete_device_stale=True, + as_root=as_root) + if not host_device_tuples_substituted: - dev.RunShellCommand(['rm', '-rf', device_root], check_return=True) - dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True) + dev.RunShellCommand(['rm', '-rf', test_data_root_dir], + check_return=True, + as_root=as_root) + dev.RunShellCommand(['mkdir', '-p', test_data_root_dir], + check_return=True, + as_root=as_root) @trace_event.traced def create_flag_changer(dev): @@ -377,8 +478,8 @@ def create_flag_changer(dev): dev, self._test_instance.timeout_scale) steps += [ - set_debug_app, edit_shared_prefs, push_test_data, create_flag_changer, - set_vega_permissions + set_debug_app, edit_shared_prefs, approve_app_links, push_test_data, + create_flag_changer, set_vega_permissions, DismissCrashDialogs ] def bind_crash_handler(step, dev): @@ -425,7 +526,7 @@ def bind_crash_handler(step, dev): if self._test_instance.wait_for_java_debugger: logging.warning('*' * 80) logging.warning('Waiting for debugger to attach to process: %s', - target_package) + self._target_package) logging.warning('*' * 80) #override @@ -451,6 +552,11 @@ def individual_device_tear_down(dev): # Remove package-specific configuration dev.RunShellCommand(['am', 'clear-debug-app'], check_return=True) + # Execute any custom teardown shell commands + for cmd in self._test_instance.run_teardown_commands: + logging.info('Running custom teardown shell command: %s', cmd) + dev.RunShellCommand(cmd, shell=True, check_return=True) + valgrind_tools.SetChromeTimeoutScale(dev, None) # Restore any shared preference files that we stored during setup. @@ -460,6 +566,9 @@ def individual_device_tear_down(dev): for pref_to_restore in self._shared_prefs_to_restore: pref_to_restore.Commit(force_commit=True) + # If we've force approved app links for a package, undo that now. + self._ToggleAppLinks(dev, 'STATE_NO_RESPONSE') + # Context manager exit handlers are applied in reverse order # of the enter handlers. for context in reversed(self._context_managers[str(dev)]): @@ -470,6 +579,24 @@ def individual_device_tear_down(dev): self._env.parallel_devices.pMap(individual_device_tear_down) + def _ToggleAppLinks(self, dev, state): + # The set-app-links command was added in Android 12 (sdk = 31). The + # restrictions that require us to set the app links were also added in + # Android 12, so doing nothing on earlier Android versions is fine. + if dev.build_version_sdk < version_codes.S: + return + + package = self._test_instance.approve_app_links_package + domain = self._test_instance.approve_app_links_domain + + if not package or not domain: + return + + cmd = [ + 'pm', 'set-app-links', '--package', package, state, domain + ] + dev.RunShellCommand(cmd, check_return=True) + def _CreateFlagChangerIfNeeded(self, device): if str(device) not in self._flag_changers: cmdline_file = 'test-cmdline-file' @@ -483,7 +610,16 @@ def _CreateFlagChangerIfNeeded(self, device): device, cmdline_file) #override - def _CreateShards(self, tests): + def _CreateShardsForDevices(self, tests): + """Create shards of tests to run on devices. + + Args: + tests: List containing tests or test batches. + + Returns: + List of tests or batches. + """ + # Each test or test batch will be a single shard. return tests #override @@ -498,6 +634,14 @@ def _GetTests(self): self._test_instance.total_external_shards) return tests + #override + def GetTestsForListing(self): + # Parent class implementation assumes _GetTests() returns strings rather + # than dicts. 
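[Editor's aside: the _ToggleAppLinks helper above shells out to pm set-app-links, which only exists on Android 12+ (SDK 31), hence the version_codes.S early return. What it runs on the device, with a hypothetical package and domain and `device` again a devil DeviceUtils:

    cmd = ['pm', 'set-app-links', '--package', 'com.example.app',
           'STATE_APPROVED', 'example.com']
    device.RunShellCommand(cmd, check_return=True)
]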
+ test_dicts = self._GetTests() + test_dicts = local_device_test_run.FlattenTestList(test_dicts) + return sorted('{}#{}'.format(d['class'], d['method']) for d in test_dicts) + #override def _GroupTests(self, tests): batched_tests = dict() @@ -512,29 +656,60 @@ def _GroupTests(self, tests): # Feature flags won't work in instrumentation tests unless the activity # is restarted. # Tests with identical features are grouped to minimize restarts. - if 'Batch$SplitByFeature' in annotations: + # UnitTests that specify flags always use Features.JUnitProcessor, so + # they don't need to be split. + if batch_name != 'UnitTests': if 'Features$EnableFeatures' in annotations: batch_name += '|enabled:' + ','.join( sorted(annotations['Features$EnableFeatures']['value'])) if 'Features$DisableFeatures' in annotations: batch_name += '|disabled:' + ','.join( sorted(annotations['Features$DisableFeatures']['value'])) - - if not batch_name in batched_tests: - batched_tests[batch_name] = [] - batched_tests[batch_name].append(test) + if 'CommandLineFlags$Add' in annotations: + batch_name += '|cmd_line_add:' + ','.join( + sorted(annotations['CommandLineFlags$Add']['value'])) + if 'CommandLineFlags$Remove' in annotations: + batch_name += '|cmd_line_remove:' + ','.join( + sorted(annotations['CommandLineFlags$Remove']['value'])) + + batched_tests.setdefault(batch_name, []).append(test) else: other_tests.append(test) + def dict2list(d): + if isinstance(d, dict): + return sorted([(k, dict2list(v)) for k, v in d.items()]) + if isinstance(d, list): + return [dict2list(v) for v in d] + if isinstance(d, tuple): + return tuple(dict2list(v) for v in d) + return d + + test_count = sum( + [len(test) - 1 for test in tests if self._CountTestsIndividually(test)]) + test_count += len(tests) + if self._test_instance.total_external_shards > 1: + # Calculate suitable test batch max group size based on average partition + # size. The batch size should be below partition size to balance between + # shards. Choose to divide by 3 as it works fine with most of test suite + # without increasing too much setup/teardown time for batch tests. + test_batch_max_group_size = \ + max(1, test_count // self._test_instance.total_external_shards // 3) + else: + test_batch_max_group_size = _LOCAL_TEST_BATCH_MAX_GROUP_SIZE + all_tests = [] - for _, tests in batched_tests.items(): - tests.sort() # Ensure a consistent ordering across external shards. + for _, btests in list(batched_tests.items()): + # Ensure a consistent ordering across external shards. + btests.sort(key=dict2list) all_tests.extend([ - tests[i:i + _TEST_BATCH_MAX_GROUP_SIZE] - for i in range(0, len(tests), _TEST_BATCH_MAX_GROUP_SIZE) + btests[i:i + test_batch_max_group_size] + for i in range(0, len(btests), test_batch_max_group_size) ]) all_tests.extend(other_tests) - return all_tests + # Sort all tests by hash. + # TODO(crbug.com/1257820): Add sorting logic back to _PartitionTests. + return self._SortTests(all_tests) #override def _GetUniqueTestName(self, test): @@ -544,6 +719,9 @@ def _GetUniqueTestName(self, test): def _RunTest(self, device, test): extras = {} + if self._test_instance.is_unit_test: + extras[_EXTRA_TEST_IS_UNIT] = 'true' + # Provide package name under test for apk_under_test. 
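[Editor's aside: under external sharding, the batch cap computed above is the average shard size divided by 3 (but at least 1); without sharding it falls back to the fixed local cap of 200. A sketch of the arithmetic:

    def batch_cap(test_count, total_external_shards, local_cap=200):
      if total_external_shards > 1:
        return max(1, test_count // total_external_shards // 3)
      return local_cap

    assert batch_cap(1200, 4) == 100  # 1200 tests over 4 shards.
    assert batch_cap(5, 10) == 1      # Never drops below one test per batch.
    assert batch_cap(1200, 1) == 200  # Local runs keep the fixed cap.
]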
if self._test_instance.apk_under_test:
       package_name = self._test_instance.apk_under_test.GetPackageName()
@@ -556,8 +734,6 @@ def _RunTest(self, device, test):
                             (test[0]['class'], test[0]['method'])
                             if isinstance(test, list) else
                             '%s_%s' % (test['class'], test['method']))
-      if self._test_instance.jacoco_coverage_type:
-        coverage_basename += "_" + self._test_instance.jacoco_coverage_type
       extras['coverage'] = 'true'
       coverage_directory = os.path.join(
           device.GetExternalStoragePath(), 'chrome', 'test', 'coverage')
@@ -567,6 +743,16 @@ def _RunTest(self, device, test):
       coverage_device_file = os.path.join(coverage_directory,
                                           coverage_basename)
       coverage_device_file += '.exec'
       extras['coverageFile'] = coverage_device_file
+
+    if self._test_instance.enable_breakpad_dump:
+      # Use the external storage directory so that the breakpad dump can be
+      # accessed by the test APK in addition to the apk_under_test.
+      breakpad_dump_directory = os.path.join(device.GetExternalStoragePath(),
+                                             'chromium_dumps')
+      if device.PathExists(breakpad_dump_directory):
+        device.RemovePath(breakpad_dump_directory, recursive=True)
+      flags_to_add.append('--breakpad-dump-location=' + breakpad_dump_directory)
+
     # Save screenshot if screenshot dir is specified (save locally) or if
     # a GS bucket is passed (save in cloud).
     screenshot_device_file = device_temp_file.DeviceTempFile(
@@ -646,10 +832,12 @@ def name_and_timeout(t):
                    wpr_archive_path, os.path.exists(wpr_archive_path)))
 
+      file_name = _GetWPRArchiveFileName(
+          test) or self._GetUniqueTestName(test) + '.wprgo'
+
       # Some Linux versions do not like '#' in the name. Replace it with __.
-      archive_path = os.path.join(
-          wpr_archive_path,
-          _ReplaceUncommonChars(self._GetUniqueTestName(test)) + '.wprgo')
+      archive_path = os.path.join(wpr_archive_path,
+                                  _ReplaceUncommonChars(file_name))
 
       if not os.path.exists(_WPR_GO_LINUX_X86_64_PATH):
         # If we got to this stage, then we should have
@@ -668,6 +856,9 @@ def name_and_timeout(t):
       self._CreateFlagChangerIfNeeded(device)
       self._flag_changers[str(device)].PushFlags(add=flags_to_add)
 
+    if self._test_instance.store_data_in_app_directory:
+      extras.update({'fetchTestDataFromAppDataDir': 'true'})
+
     time_ms = lambda: int(time.time() * 1e3)
     start_ms = time_ms()
 
@@ -708,9 +899,14 @@ def handle_coverage_data():
         try:
           if not os.path.exists(self._test_instance.coverage_directory):
             os.makedirs(self._test_instance.coverage_directory)
-          device.PullFile(coverage_device_file,
-                          self._test_instance.coverage_directory)
-          device.RemovePath(coverage_device_file, True)
+          # Retries add time to test execution.
+          if device.PathExists(coverage_device_file, retries=0):
+            device.PullFile(coverage_device_file,
+                            self._test_instance.coverage_directory)
+            device.RemovePath(coverage_device_file, True)
+          else:
+            logging.warning('Coverage file does not exist: %s',
+                            coverage_device_file)
         except (OSError, base_error.BaseError) as e:
           logging.warning('Failed to handle coverage data after tests: %s', e)
 
@@ -764,6 +960,25 @@ def stop_chrome_proxy():
                        self._chrome_proxy.wpr_archive_path)
           self._chrome_proxy = None
 
+      def pull_baseline_profile():
+        # Search through status responses for the one with the key we are
+        # looking for.
+        for _, bundle in statuses:
+          baseline_profile_path = bundle.get(
+              'additionalTestOutputFile_baseline-profile-ts')
+          if baseline_profile_path:
+            # Found it.
+            break
+        else:
+          # This test does not generate a baseline profile.
+ return + with self._env.output_manager.ArchivedTempfile( + 'baseline_profile.txt', 'baseline_profile') as baseline_profile: + device.PullFile(baseline_profile_path, baseline_profile.name) + _SetLinkOnResults(results, test_name, 'baseline_profile', + baseline_profile.Link()) + logging.warning('Baseline Profile Location %s', baseline_profile.Link()) + # While constructing the TestResult objects, we can parallelize several # steps that involve ADB. These steps should NOT depend on any info in @@ -771,7 +986,8 @@ def stop_chrome_proxy(): # determined. post_test_steps = [ restore_flags, restore_timeout_scale, stop_chrome_proxy, - handle_coverage_data, handle_render_test_data, pull_ui_screen_captures + handle_coverage_data, handle_render_test_data, + pull_ui_screen_captures, pull_baseline_profile ] if self._env.concurrent_adb: reraiser_thread.RunAsync(post_test_steps) @@ -798,10 +1014,24 @@ def stop_chrome_proxy(): # Update the result type if we detect a crash. try: - if DidPackageCrashOnDevice(self._test_instance.test_package, device): + crashed_packages = DismissCrashDialogs(device) + # Assume test package convention of ".test" suffix + if any(p in self._test_instance.test_package for p in crashed_packages): for r in results: if r.GetType() == base_test_result.ResultType.UNKNOWN: r.SetType(base_test_result.ResultType.CRASH) + elif (crashed_packages and len(results) == 1 + and results[0].GetType() != base_test_result.ResultType.PASS): + # Add log message and set failure reason if: + # 1) The app crash was likely not caused by the test. + # AND + # 2) The app crash possibly caused the test to fail. + # Crashes of the package under test are assumed to be the test's fault. + _AppendToLogForResult( + results[0], 'OS displayed error dialogs for {}'.format( + ', '.join(crashed_packages))) + results[0].SetFailureReason('{} Crashed'.format( + ','.join(crashed_packages))) except device_errors.CommandTimeoutError: logging.warning('timed out when detecting/dismissing error dialogs') # Attach screenshot to the test to help with debugging the dialog boxes. @@ -819,7 +1049,7 @@ def stop_chrome_proxy(): # Handle failures by: # - optionally taking a screenshot - # - logging the raw output at INFO level + # - logging the raw output at ERROR level # - clearing the application state while persisting permissions if any(r.GetType() not in (base_test_result.ResultType.PASS, base_test_result.ResultType.SKIP) @@ -827,17 +1057,17 @@ def stop_chrome_proxy(): self._SaveScreenshot(device, screenshot_device_file, test_display_name, results, 'post_test_screenshot') - logging.info('detected failure in %s. raw output:', test_display_name) + logging.error('detected failure in %s. 
raw output:', test_display_name) for l in output: - logging.info(' %s', l) - if (not self._env.skip_clear_data - and self._test_instance.package_info): - permissions = ( - self._test_instance.apk_under_test.GetPermissions() - if self._test_instance.apk_under_test - else None) - device.ClearApplicationState(self._test_instance.package_info.package, - permissions=permissions) + logging.error(' %s', l) + if not self._env.skip_clear_data: + if self._test_instance.package_info: + permissions = (self._test_instance.apk_under_test.GetPermissions() + if self._test_instance.apk_under_test else None) + device.ClearApplicationState(self._test_instance.package_info.package, + permissions=permissions) + if self._test_instance.enable_breakpad_dump: + device.RemovePath(breakpad_dump_directory, recursive=True) else: logging.debug('raw output from %s:', test_display_name) for l in output: @@ -935,11 +1165,15 @@ def _GetTestsFromRunner(self): logging.info('Could not get tests from pickle: %s', e) logging.info('Getting tests by having %s list them.', self._test_instance.junit4_runner_class) + # We need to use GetAppWritablePath instead of GetExternalStoragePath + # here because we will not have applied legacy storage workarounds on R+ + # yet. + # TODO(rmhasan): Figure out how to create the temp file inside the test + # app's data directory. Currently when the temp file is created read + # permissions are only given to the app's user id. Therefore we can't + # pull the file from the device. def list_tests(d): def _run(dev): - # We need to use GetAppWritablePath instead of GetExternalStoragePath - # here because we will not have applied legacy storage workarounds on R+ - # yet. with device_temp_file.DeviceTempFile( dev.adb, suffix='.json', dir=dev.GetAppWritablePath()) as dev_test_list_json: @@ -993,24 +1227,22 @@ def _ArchiveLogcat(self, device, test_name): logcat_file = None logmon = None try: - with self._env.output_manager.ArchivedTempfile( - stream_name, 'logcat') as logcat_file: + with self._env.output_manager.ArchivedTempfile(stream_name, + 'logcat') as logcat_file: with logcat_monitor.LogcatMonitor( device.adb, filter_specs=local_device_environment.LOGCAT_FILTERS, output_file=logcat_file.name, transform_func=self._test_instance.MaybeDeobfuscateLines, check_error=False) as logmon: - with _LogTestEndpoints(device, test_name): - with contextlib_ext.Optional( - trace_event.trace(test_name), - self._env.trace_output): - yield logcat_file + with contextlib_ext.Optional(trace_event.trace(test_name), + self._env.trace_output): + yield logcat_file finally: if logmon: logmon.Close() if logcat_file and logcat_file.Link(): - logging.info('Logcat saved to %s', logcat_file.Link()) + logging.critical('Logcat saved to %s', logcat_file.Link()) def _SaveTraceData(self, trace_device_file, device, test_class): trace_host_file = self._env.trace_output @@ -1018,8 +1250,8 @@ def _SaveTraceData(self, trace_device_file, device, test_class): if device.FileExists(trace_device_file.name): try: java_trace_json = device.ReadFile(trace_device_file.name) - except IOError: - raise Exception('error pulling trace file from device') + except IOError as e: + raise Exception('error pulling trace file from device') from e finally: trace_device_file.close() @@ -1134,8 +1366,12 @@ def _ProcessSkiaGoldRenderTestResults(self, device, results): # All the key/value pairs in the JSON file are strings, so convert # to a bool. 
json_dict = json.load(infile) - fail_on_unsupported = json_dict.get('fail_on_unsupported_configs', - 'false') + optional_dict = json_dict.get('optional_keys', {}) + if 'optional_keys' in json_dict: + should_rewrite = True + del json_dict['optional_keys'] + fail_on_unsupported = optional_dict.get('fail_on_unsupported_configs', + 'false') fail_on_unsupported = fail_on_unsupported.lower() == 'true' # Grab the full test name so we can associate the comparison with a # particular test, which is necessary if tests are batched together. @@ -1155,7 +1391,8 @@ def _ProcessSkiaGoldRenderTestResults(self, device, results): # should_ignore_in_gold != should_hide_failure. should_hide_failure = running_on_unsupported if should_ignore_in_gold: - should_rewrite = True + # This is put in the regular keys dict instead of the optional one + # because ignore rules do not apply to optional keys. json_dict['ignore'] = '1' if should_rewrite: with open(json_path, 'w') as outfile: @@ -1169,7 +1406,8 @@ def _ProcessSkiaGoldRenderTestResults(self, device, results): name=render_name, png_file=image_path, output_manager=self._env.output_manager, - use_luci=use_luci) + use_luci=use_luci, + optional_keys=optional_dict) except Exception as e: # pylint: disable=broad-except _FailTestIfNecessary(results, full_test_name) _AppendToLog(results, full_test_name, @@ -1277,7 +1515,13 @@ def _ShouldRetry(self, test, result): return True #override - def _ShouldShard(self): + def _ShouldShardTestsForDevices(self): + """Shard tests across several devices. + + Returns: + True if tests should be sharded across several devices, + False otherwise. + """ return True @classmethod @@ -1307,10 +1551,8 @@ def _IsWPRRecordReplayTest(test): """Determines whether a test or a list of tests is a WPR RecordReplay Test.""" if not isinstance(test, list): test = [test] - return any([ - WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION in t['annotations'].get( - FEATURE_ANNOTATION, {}).get('value', ()) for t in test - ]) + return any(WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION in t['annotations'].get( + FEATURE_ANNOTATION, {}).get('value', ()) for t in test) def _GetWPRArchivePath(test): @@ -1319,6 +1561,13 @@ def _GetWPRArchivePath(test): {}).get('value', ()) +def _GetWPRArchiveFileName(test): + """Retrieves the WPRArchiveDirectory.ArchiveName annotation.""" + value = test['annotations'].get(WPR_ARCHIVE_NAME_ANNOTATION, + {}).get('value', None) + return value[0] if value else None + + def _ReplaceUncommonChars(original): """Replaces uncommon characters with __.""" if not original: @@ -1334,8 +1583,8 @@ def _IsRenderTest(test): """Determines if a test or list of tests has a RenderTest amongst them.""" if not isinstance(test, list): test = [test] - return any([RENDER_TEST_FEATURE_ANNOTATION in t['annotations'].get( - FEATURE_ANNOTATION, {}).get('value', ()) for t in test]) + return any(RENDER_TEST_FEATURE_ANNOTATION in t['annotations'].get( + FEATURE_ANNOTATION, {}).get('value', ()) for t in test) def _GenerateRenderTestHtml(image_name, failure_link, golden_link, diff_link): @@ -1407,7 +1656,11 @@ def _AppendToLog(results, full_test_name, line): for result in results: if found_matching_test and result.GetName() != full_test_name: continue - result.SetLog(result.GetLog() + '\n' + line) + _AppendToLogForResult(result, line) + + +def _AppendToLogForResult(result, line): + result.SetLog(result.GetLog() + '\n' + line) def _SetLinkOnResults(results, full_test_name, link_name, link): @@ -1443,7 +1696,7 @@ def _MatchingTestInResults(results, full_test_name): True if one 
of the results in |results| has the same name as |full_test_name|, otherwise False. """ - return any([r for r in results if r.GetName() == full_test_name]) + return any(r for r in results if r.GetName() == full_test_name) def _ShouldReportNoMatchingResult(full_test_name): diff --git a/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py b/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py index 7870cd198d37..fb41572d3f30 100755 --- a/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py +++ b/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2017 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,8 +7,9 @@ # pylint: disable=protected-access -from __future__ import absolute_import + import unittest +import mock # pylint: disable=import-error from pylib.base import base_test_result from pylib.base import mock_environment @@ -19,7 +20,7 @@ class LocalDeviceInstrumentationTestRunTest(unittest.TestCase): def setUp(self): - super(LocalDeviceInstrumentationTestRunTest, self).setUp() + super().setUp() self._env = mock_environment.MockEnvironment() self._ti = mock_test_instance.MockTestInstance() self._obj = ( @@ -164,6 +165,33 @@ def testReplaceUncommonChars(self): with self.assertRaises(ValueError): local_device_instrumentation_test_run._ReplaceUncommonChars(original) + def testStoreDataInAppDir(self): + env = mock.MagicMock() + test_instance = mock.MagicMock() + test_instance.store_data_in_app_directory = True + device = mock.MagicMock() + + device.GetApplicationDataDirectory.return_value = 'app_dir' + device.GetExternalStoragePath.return_value = 'external_dir' + test_run = ( + local_device_instrumentation_test_run.LocalDeviceInstrumentationTestRun( + env, test_instance)) + self.assertEqual(test_run._GetDataStorageRootDirectory(device), 'app_dir') + + def testStoreDataInExternalDir(self): + env = mock.MagicMock() + test_instance = mock.MagicMock() + test_instance.store_data_in_app_directory = False + device = mock.MagicMock() + + device.GetApplicationDataDirectory.return_value = 'app_dir' + device.GetExternalStoragePath.return_value = 'external_dir' + test_run = ( + local_device_instrumentation_test_run.LocalDeviceInstrumentationTestRun( + env, test_instance)) + self.assertEqual(test_run._GetDataStorageRootDirectory(device), + 'external_dir') + if __name__ == '__main__': unittest.main(verbosity=2) diff --git a/build/android/pylib/local/device/local_device_monkey_test_run.py b/build/android/pylib/local/device/local_device_monkey_test_run.py index f0d233993524..e90cbbd27f84 100644 --- a/build/android/pylib/local/device/local_device_monkey_test_run.py +++ b/build/android/pylib/local/device/local_device_monkey_test_run.py @@ -1,8 +1,8 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import absolute_import + import logging from six.moves import range # pylint: disable=redefined-builtin @@ -16,9 +16,6 @@ _CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package class LocalDeviceMonkeyTestRun(local_device_test_run.LocalDeviceTestRun): - def __init__(self, env, test_instance): - super(LocalDeviceMonkeyTestRun, self).__init__(env, test_instance) - def TestPackage(self): return 'monkey' @@ -91,11 +88,26 @@ def TearDown(self): pass #override - def _CreateShards(self, tests): + def _CreateShardsForDevices(self, tests): + """Create shards of tests to run on devices. + + Args: + tests: List containing tests or test batches. + + Returns: + True if tests should be sharded across several devices, + False otherwise. + """ return tests #override - def _ShouldShard(self): + def _ShouldShardTestsForDevices(self): + """Shard tests across several devices. + + Returns: + True if tests should be sharded across several devices, + False otherwise. + """ # TODO(mikecase): Run Monkey test concurrently on each attached device. return False diff --git a/build/android/pylib/local/device/local_device_test_run.py b/build/android/pylib/local/device/local_device_test_run.py index 4381538ee26b..e3862d14b497 100644 --- a/build/android/pylib/local/device/local_device_test_run.py +++ b/build/android/pylib/local/device/local_device_test_run.py @@ -1,12 +1,16 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import fnmatch +import hashlib import logging import posixpath import signal -import thread +try: + import _thread as thread +except ImportError: + import thread import threading from devil import base_error @@ -30,10 +34,9 @@ def SubstituteDeviceRoot(device_path, device_root): if not device_path: return device_root - elif isinstance(device_path, list): + if isinstance(device_path, list): return posixpath.join(*(p if p else device_root for p in device_path)) - else: - return device_path + return device_path class TestsTerminated(Exception): @@ -42,22 +45,22 @@ class TestsTerminated(Exception): class InvalidShardingSettings(Exception): def __init__(self, shard_index, total_shards): - super(InvalidShardingSettings, self).__init__( - 'Invalid sharding settings. shard_index: %d total_shards: %d' - % (shard_index, total_shards)) + super().__init__( + 'Invalid sharding settings. shard_index: %d total_shards: %d' % + (shard_index, total_shards)) class LocalDeviceTestRun(test_run.TestRun): def __init__(self, env, test_instance): - super(LocalDeviceTestRun, self).__init__(env, test_instance) + super().__init__(env, test_instance) self._tools = {} # This is intended to be filled by a child class. 
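[Editor's aside: SubstituteDeviceRoot above accepts None, a string, or a list whose falsy entries stand in for the device root. A self-contained sketch of the same logic with the three branches exercised (inputs are made-up examples):

    import posixpath

    def substitute_device_root(device_path, device_root):
      if not device_path:
        return device_root
      if isinstance(device_path, list):
        return posixpath.join(*(p if p else device_root for p in device_path))
      return device_path

    substitute_device_root(None, '/sdcard/root')         # -> '/sdcard/root'
    substitute_device_root('/abs/path', '/sdcard/root')  # -> '/abs/path'
    substitute_device_root([None, 'a', 'b'], '/sdcard/root')
    # -> '/sdcard/root/a/b'
]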
self._installed_packages = [] env.SetPreferredAbis(test_instance.GetPreferredAbis()) #override - def RunTests(self, results): + def RunTests(self, results, raw_logs_fh=None): tests = self._GetTests() exit_now = threading.Event() @@ -105,22 +108,23 @@ def GetResultTypeForTest(t): if isinstance(test, list): results.AddResults( - base_test_result.BaseTestResult(self._GetUniqueTestName(t), - GetResultTypeForTest(t)) - for t in test) + base_test_result.BaseTestResult( + self._GetUniqueTestName(t), + base_test_result.ResultType.TIMEOUT) for t in test) else: results.AddResult( - base_test_result.BaseTestResult(self._GetUniqueTestName(test), - GetResultTypeForTest(test))) - except Exception as e: # pylint: disable=broad-except + base_test_result.BaseTestResult( + self._GetUniqueTestName(test), + base_test_result.ResultType.TIMEOUT)) + except device_errors.DeviceUnreachableError: + # If the device is no longer reachable then terminate this + # run_tests_on_device call. + raise + except base_error.BaseError: + # If we get a device error but believe the device is still + # reachable, attempt to continue using it. if isinstance(tests, test_collection.TestCollection): rerun = test - if (isinstance(e, device_errors.DeviceUnreachableError) - or not isinstance(e, base_error.BaseError)): - # If we get a device error but believe the device is still - # reachable, attempt to continue using it. Otherwise, raise - # the exception and terminate this run_tests_on_device call. - raise consecutive_device_errors += 1 if consecutive_device_errors >= 3: @@ -187,9 +191,9 @@ def stop_tests(_signum, _frame): results.append(try_results) try: - if self._ShouldShard(): + if self._ShouldShardTestsForDevices(): tc = test_collection.TestCollection( - self._CreateShards(grouped_tests)) + self._CreateShardsForDevices(grouped_tests)) self._env.parallel_devices.pMap( run_tests_on_device, tc, try_results).pGet(None) else: @@ -234,17 +238,15 @@ def is_failure_result(test_result): tests_and_results = {} for test, name in tests_and_names: if name.endswith('*'): - tests_and_results[name] = ( - test, - [r for n, r in all_test_results.iteritems() - if fnmatch.fnmatch(n, name)]) + tests_and_results[name] = (test, [ + r for n, r in all_test_results.items() if fnmatch.fnmatch(n, name) + ]) else: tests_and_results[name] = (test, all_test_results.get(name)) - failed_tests_and_results = ( - (test, result) for test, result in tests_and_results.itervalues() - if is_failure_result(result) - ) + failed_tests_and_results = ((test, result) + for test, result in tests_and_results.values() + if is_failure_result(result)) return [t for t, r in failed_tests_and_results if self._ShouldRetry(t, r)] @@ -257,6 +259,10 @@ def _ApplyExternalSharding(self, tests, shard_index, total_shards): sharded_tests = [] + # Sort tests by hash. + # TODO(crbug.com/1257820): Add sorting logic back to _PartitionTests. + tests = self._SortTests(tests) + # Group tests by tests that should run in the same test invocation - either # unit tests or batched tests. grouped_tests = self._GroupTests(tests) @@ -273,6 +279,14 @@ def _ApplyExternalSharding(self, tests, shard_index, total_shards): sharded_tests.append(t) return sharded_tests + # Sort by hash so we don't put all tests in a slow suite in the same + # partition. + def _SortTests(self, tests): + return sorted(tests, + key=lambda t: hashlib.sha256( + self._GetUniqueTestName(t[0] if isinstance(t, list) else t + ).encode()).hexdigest()) + # Partition tests evenly into |num_desired_partitions| partitions where # possible. 
However, many constraints can make a perfectly even partition impossible. # If the max_partition_size isn't large enough, extra partitions may be @@ -286,24 +300,9 @@ def _PartitionTests(self, tests, num_desired_partitions, max_partition_size): # pylint: disable=no-self-use partitions = [] - # Sort by hash so we don't put all tests in a slow suite in the same - # partition. - tests = sorted( - tests, - key=lambda t: hash( - self._GetUniqueTestName(t[0] if isinstance(t, list) else t))) - - def CountTestsIndividually(test): - if not isinstance(test, list): - return False - annotations = test[0]['annotations'] - # UnitTests tests are really fast, so to balance shards better, count - # UnitTests Batches as single tests. - return ('Batch' not in annotations - or annotations['Batch']['value'] != 'UnitTests') num_not_yet_allocated = sum( - [len(test) - 1 for test in tests if CountTestsIndividually(test)]) + [len(test) - 1 for test in tests if self._CountTestsIndividually(test)]) num_not_yet_allocated += len(tests) # Fast linear partition approximation capped by max_partition_size. We @@ -314,8 +313,7 @@ def CountTestsIndividually(test): partitions.append([]) last_partition_size = 0 for test in tests: - test_count = len(test) if CountTestsIndividually(test) else 1 - num_not_yet_allocated -= test_count + test_count = len(test) if self._CountTestsIndividually(test) else 1 # Make a new shard whenever we would overfill the previous one. However, # if the size of the test group is larger than the max partition size on # its own, just put the group in its own shard instead of splitting up the @@ -323,9 +321,6 @@ def CountTestsIndividually(test): if (last_partition_size + test_count > partition_size and last_partition_size > 0): num_desired_partitions -= 1 - partitions.append([]) - partitions[-1].append(test) - last_partition_size = test_count if num_desired_partitions <= 0: # Too many tests for the number of partitions; just fill all partitions # beyond num_desired_partitions. @@ -334,21 +329,36 @@ def CountTestsIndividually(test): # Re-balance remaining partitions. partition_size = min(num_not_yet_allocated // num_desired_partitions, max_partition_size) + partitions.append([]) + partitions[-1].append(test) + last_partition_size = test_count else: partitions[-1].append(test) last_partition_size += test_count + num_not_yet_allocated -= test_count + if not partitions[-1]: partitions.pop() return partitions + def _CountTestsIndividually(self, test): + # pylint: disable=no-self-use + if not isinstance(test, list): + return False + annotations = test[0]['annotations'] + # UnitTests tests are really fast, so to balance shards better, count + # UnitTests Batches as single tests.
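+ # For example, a UnitTests batch of 50 tests counts as a single test
+ # toward partition sizing, while a batch of 50 in any other Batch group
+ # counts as 50.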
+ return ('Batch' not in annotations + or annotations['Batch']['value'] != 'UnitTests') + def GetTool(self, device): if str(device) not in self._tools: self._tools[str(device)] = valgrind_tools.CreateTool( self._env.tool, device) return self._tools[str(device)] - def _CreateShards(self, tests): + def _CreateShardsForDevices(self, tests): raise NotImplementedError def _GetUniqueTestName(self, test): @@ -359,6 +369,13 @@ def _ShouldRetry(self, test, result): # pylint: disable=no-self-use,unused-argument return True + #override + def GetTestsForListing(self): + ret = self._GetTests() + ret = FlattenTestList(ret) + ret.sort() + return ret + def _GetTests(self): raise NotImplementedError @@ -369,10 +386,21 @@ def _GroupTests(self, tests): def _RunTest(self, device, test): raise NotImplementedError - def _ShouldShard(self): + def _ShouldShardTestsForDevices(self): raise NotImplementedError +def FlattenTestList(values): + """Returns a list with all nested lists (shard groupings) expanded.""" + ret = [] + for v in values: + if isinstance(v, list): + ret += v + else: + ret.append(v) + return ret + + def SetAppCompatibilityFlagsIfNecessary(packages, device): """Sets app compatibility flags on the given packages and device. diff --git a/build/android/pylib/local/device/local_device_test_run_test.py b/build/android/pylib/local/device/local_device_test_run_test.py index 77bbc2e1fdb3..5f0068ae03f8 100755 --- a/build/android/pylib/local/device/local_device_test_run_test.py +++ b/build/android/pylib/local/device/local_device_test_run_test.py @@ -1,11 +1,11 @@ -#!/usr/bin/env vpython -# Copyright 2016 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
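
A quick sketch, not part of the patch, of the ordering the new _SortTests above produces: sorting by the sha256 hex digest of each test's unique name is deterministic across runs yet unrelated to suite membership, which is what spreads a slow suite across shards. The helper name is illustrative; for plain string test names it mirrors _SortTests and reproduces the expectation asserted in testSortTests below.

import hashlib

def sort_tests_by_hash(tests):
  # Deterministic but suite-agnostic ordering, as in _SortTests.
  return sorted(tests, key=lambda t: hashlib.sha256(t.encode()).hexdigest())

print(sort_tests_by_hash(['a', 'b', 'c', 'd', 'e', 'f', 'g']))
# -> ['d', 'f', 'c', 'b', 'e', 'a', 'g']
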
# pylint: disable=protected-access -from __future__ import absolute_import + import unittest from pylib.base import base_test_result @@ -17,29 +17,27 @@ class SubstituteDeviceRootTest(unittest.TestCase): def testNoneDevicePath(self): - self.assertEquals( + self.assertEqual( '/fake/device/root', - local_device_test_run.SubstituteDeviceRoot( - None, '/fake/device/root')) + local_device_test_run.SubstituteDeviceRoot(None, '/fake/device/root')) def testStringDevicePath(self): - self.assertEquals( + self.assertEqual( '/another/fake/device/path', - local_device_test_run.SubstituteDeviceRoot( - '/another/fake/device/path', '/fake/device/root')) + local_device_test_run.SubstituteDeviceRoot('/another/fake/device/path', + '/fake/device/root')) def testListWithNoneDevicePath(self): - self.assertEquals( + self.assertEqual( '/fake/device/root/subpath', - local_device_test_run.SubstituteDeviceRoot( - [None, 'subpath'], '/fake/device/root')) + local_device_test_run.SubstituteDeviceRoot([None, 'subpath'], + '/fake/device/root')) def testListWithoutNoneDevicePath(self): - self.assertEquals( + self.assertEqual( '/another/fake/device/path', local_device_test_run.SubstituteDeviceRoot( - ['/', 'another', 'fake', 'device', 'path'], - '/fake/device/root')) + ['/', 'another', 'fake', 'device', 'path'], '/fake/device/root')) class TestLocalDeviceTestRun(local_device_test_run.LocalDeviceTestRun): @@ -47,8 +45,7 @@ class TestLocalDeviceTestRun(local_device_test_run.LocalDeviceTestRun): # pylint: disable=abstract-method def __init__(self): - super(TestLocalDeviceTestRun, self).__init__( - mock.MagicMock(), mock.MagicMock()) + super().__init__(mock.MagicMock(), mock.MagicMock()) class TestLocalDeviceNonStringTestRun( @@ -57,8 +54,7 @@ class TestLocalDeviceNonStringTestRun( # pylint: disable=abstract-method def __init__(self): - super(TestLocalDeviceNonStringTestRun, self).__init__( - mock.MagicMock(), mock.MagicMock()) + super().__init__(mock.MagicMock(), mock.MagicMock()) def _GetUniqueTestName(self, test): return test['name'] @@ -66,6 +62,11 @@ def _GetUniqueTestName(self, test): class LocalDeviceTestRunTest(unittest.TestCase): + def testSortTests(self): + test_run = TestLocalDeviceTestRun() + self.assertEqual(test_run._SortTests(['a', 'b', 'c', 'd', 'e', 'f', 'g']), + ['d', 'f', 'c', 'b', 'e', 'a', 'g']) + def testGetTestsToRetry_allTestsPassed(self): results = [ base_test_result.BaseTestResult( @@ -80,7 +81,7 @@ def testGetTestsToRetry_allTestsPassed(self): test_run = TestLocalDeviceTestRun() tests_to_retry = test_run._GetTestsToRetry(tests, try_results) - self.assertEquals(0, len(tests_to_retry)) + self.assertEqual(0, len(tests_to_retry)) def testGetTestsToRetry_testFailed(self): results = [ @@ -96,7 +97,7 @@ def testGetTestsToRetry_testFailed(self): test_run = TestLocalDeviceTestRun() tests_to_retry = test_run._GetTestsToRetry(tests, try_results) - self.assertEquals(1, len(tests_to_retry)) + self.assertEqual(1, len(tests_to_retry)) self.assertIn('Test1', tests_to_retry) def testGetTestsToRetry_testUnknown(self): @@ -111,7 +112,7 @@ def testGetTestsToRetry_testUnknown(self): test_run = TestLocalDeviceTestRun() tests_to_retry = test_run._GetTestsToRetry(tests, try_results) - self.assertEquals(1, len(tests_to_retry)) + self.assertEqual(1, len(tests_to_retry)) self.assertIn('Test1', tests_to_retry) def testGetTestsToRetry_wildcardFilter_allPass(self): @@ -128,7 +129,7 @@ def testGetTestsToRetry_wildcardFilter_allPass(self): test_run = TestLocalDeviceTestRun() tests_to_retry = test_run._GetTestsToRetry(tests, 
try_results) - self.assertEquals(0, len(tests_to_retry)) + self.assertEqual(0, len(tests_to_retry)) def testGetTestsToRetry_wildcardFilter_oneFails(self): results = [ @@ -144,7 +145,7 @@ def testGetTestsToRetry_wildcardFilter_oneFails(self): test_run = TestLocalDeviceTestRun() tests_to_retry = test_run._GetTestsToRetry(tests, try_results) - self.assertEquals(1, len(tests_to_retry)) + self.assertEqual(1, len(tests_to_retry)) self.assertIn('TestCase.*', tests_to_retry) def testGetTestsToRetry_nonStringTests(self): @@ -164,9 +165,9 @@ def testGetTestsToRetry_nonStringTests(self): test_run = TestLocalDeviceNonStringTestRun() tests_to_retry = test_run._GetTestsToRetry(tests, try_results) - self.assertEquals(1, len(tests_to_retry)) + self.assertEqual(1, len(tests_to_retry)) self.assertIsInstance(tests_to_retry[0], dict) - self.assertEquals(tests[1], tests_to_retry[0]) + self.assertEqual(tests[1], tests_to_retry[0]) if __name__ == '__main__': diff --git a/build/android/pylib/local/emulator/OWNERS b/build/android/pylib/local/emulator/OWNERS index 0853590d4b8a..36abc1801d80 100644 --- a/build/android/pylib/local/emulator/OWNERS +++ b/build/android/pylib/local/emulator/OWNERS @@ -1,4 +1,3 @@ bpastene@chromium.org hypan@google.com jbudorick@chromium.org -liaoyuke@chromium.org diff --git a/build/android/pylib/local/emulator/__init__.py b/build/android/pylib/local/emulator/__init__.py index 4a12e35c9256..401c54b0d9c6 100644 --- a/build/android/pylib/local/emulator/__init__.py +++ b/build/android/pylib/local/emulator/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/local/emulator/avd.py b/build/android/pylib/local/emulator/avd.py index 51365eb232f5..62db9b591aab 100644 --- a/build/android/pylib/local/emulator/avd.py +++ b/build/android/pylib/local/emulator/avd.py @@ -1,9 +1,10 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import +import collections import contextlib +import glob import json import logging import os @@ -11,11 +12,15 @@ import stat import subprocess import threading +import time from google.protobuf import text_format # pylint: disable=import-error +from devil.android import apk_helper from devil.android import device_utils +from devil.android import settings from devil.android.sdk import adb_wrapper +from devil.android.tools import system_app from devil.utils import cmd_helper from devil.utils import timeout_retry from py_utils import tempfile_ext @@ -23,9 +28,23 @@ from pylib.local.emulator import ini from pylib.local.emulator.proto import avd_pb2 -_ALL_PACKAGES = object() -_DEFAULT_AVDMANAGER_PATH = os.path.join( - constants.ANDROID_SDK_ROOT, 'cmdline-tools', 'latest', 'bin', 'avdmanager') +# A common root directory to store the CIPD packages for creating or starting +# the emulator instance, e.g. emulator binary, system images, AVDs. +COMMON_CIPD_ROOT = os.path.join(constants.DIR_SOURCE_ROOT, '.android_emulator') + +# Packages that are needed for runtime. +_PACKAGES_RUNTIME = object() +# Packages that are needed during AVD creation. +_PACKAGES_CREATION = object() +# All the packages that could exist in the AVD config file. 
+_PACKAGES_ALL = object() + +# These files are used as backing files for corresponding qcow2 images. +_BACKING_FILES = ('system.img', 'vendor.img') + +_DEFAULT_AVDMANAGER_PATH = os.path.join(constants.ANDROID_SDK_ROOT, + 'cmdline-tools', 'latest', 'bin', + 'avdmanager') # Default to a 480dp mdpi screen (a relatively large phone). # See https://developer.android.com/training/multiscreen/screensizes # and https://developer.android.com/training/multiscreen/screendensities @@ -34,6 +53,22 @@ _DEFAULT_SCREEN_HEIGHT = 960 _DEFAULT_SCREEN_WIDTH = 480 +# Default to swiftshader_indirect since it works for most cases. +_DEFAULT_GPU_MODE = 'swiftshader_indirect' + +# The snapshot name to load/save when writable_system=False. +# This is the default name used by the emulator binary. +_DEFAULT_SNAPSHOT_NAME = 'default_boot' + +# crbug.com/1275767: Set long press timeout to 1000ms to reduce the flakiness +# caused by a click being incorrectly interpreted as a long click. +_LONG_PRESS_TIMEOUT = '1000' + +# The snapshot name to load/save when writable_system=True. +_SYSTEM_SNAPSHOT_NAME = 'boot_with_system' + +_SDCARD_NAME = 'cr-sdcard.img' + class AvdException(Exception): """Raised when this module has a problem interacting with an AVD.""" @@ -49,6 +84,8 @@ def __init__(self, summary, command=None, stdout=None, stderr=None): message_parts.append(' stderr:') message_parts.extend(' %s' % line for line in stderr.splitlines()) + # avd.py is executed with python2. + # pylint: disable=R1725 super(AvdException, self).__init__('\n'.join(message_parts)) @@ -64,7 +101,47 @@ def _Load(avd_proto_path): return text_format.Merge(avd_proto_file.read(), avd_pb2.Avd()) -class _AvdManagerAgent(object): +def _FindMinSdkFile(apk_dir, min_sdk): + """Finds the apk file best matching the given min_sdk. + + This reads a version.json file located in the apk_dir to find an apk file + whose min_sdk is closest without going over the given min_sdk. + + Args: + apk_dir: The directory to look for apk files. + min_sdk: The minimum sdk version supported by the device. + + Returns: + The path to the matching apk file, or None if no entry qualifies. + """ + json_file = os.path.join(apk_dir, 'version.json') + if not os.path.exists(json_file): + logging.error('Json version file not found: %s', json_file) + return None + + min_sdk_found = None + curr_min_sdk_version = 0 + with open(json_file) as f: + data = json.loads(f.read()) + # Finds the entry that is closest to min_sdk without going over.
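+ # (Illustration with hypothetical data: given entries with min_sdk 21, 26
+ # and 30, a device min_sdk of 28 selects the min_sdk=26 entry.)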
+ for entry in data: + if (entry['min_sdk'] > curr_min_sdk_version + and entry['min_sdk'] <= min_sdk): + min_sdk_found = entry + curr_min_sdk_version = entry['min_sdk'] + + if not min_sdk_found: + logging.error('No suitable apk file found for the minimum sdk %d.', + min_sdk) + return None + + logging.info('Found apk file for minimum sdk %d: %r with version %r', + min_sdk, min_sdk_found['file_name'], + min_sdk_found['version_name']) + return os.path.join(apk_dir, min_sdk_found['file_name']) + + +class _AvdManagerAgent: """Private utility for interacting with avdmanager.""" def __init__(self, avd_home, sdk_root): @@ -92,6 +169,8 @@ def __init__(self, avd_home, sdk_root): self._avd_home, 'AVDMANAGER_OPTS': '-Dcom.android.sdkmanager.toolsdir=%s' % fake_tools_dir, + 'JAVA_HOME': + constants.JAVA_HOME, }) def Create(self, avd_name, system_image, force=False): @@ -116,19 +195,17 @@ if force: create_cmd += ['--force'] - create_proc = cmd_helper.Popen( - create_cmd, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=self._env) + create_proc = cmd_helper.Popen(create_cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=self._env) output, error = create_proc.communicate(input='\n') if create_proc.returncode != 0: - raise AvdException( - 'AVD creation failed', - command=create_cmd, - stdout=output, - stderr=error) + raise AvdException('AVD creation failed', + command=create_cmd, + stdout=output, + stderr=error) for line in output.splitlines(): logging.info(' %s', line) @@ -151,10 +228,28 @@ def Delete(self, avd_name): for line in cmd_helper.IterCmdOutputLines(delete_cmd, env=self._env): logging.info(' %s', line) except subprocess.CalledProcessError as e: + # avd.py is executed with python2. + # pylint: disable=W0707 raise AvdException('AVD deletion failed: %s' % str(e), command=delete_cmd) + def List(self): + """List the names of existing AVDs.""" + list_cmd = [ + _DEFAULT_AVDMANAGER_PATH, + '-v', + 'list', + 'avd', + '-c', + ] + output = cmd_helper.GetCmdOutput(list_cmd, env=self._env) + return output.splitlines() + + def IsAvailable(self, avd_name): + """Check if an AVD exists or not.""" + return avd_name in self.List() -class AvdConfig(object): + +class AvdConfig: """Represents a particular AVD configuration. This class supports creation, installation, and execution of an AVD @@ -168,26 +263,157 @@ def __init__(self, avd_proto_path): Args: avd_proto_path: path to a textpb file containing an Avd message. """ + self.avd_proto_path = avd_proto_path self._config = _Load(avd_proto_path) - self._emulator_home = os.path.join(constants.DIR_SOURCE_ROOT, - self._config.avd_package.dest_path) - self._emulator_sdk_root = os.path.join( - constants.DIR_SOURCE_ROOT, self._config.emulator_package.dest_path) - self._emulator_path = os.path.join(self._emulator_sdk_root, 'emulator', - 'emulator') - self._initialized = False self._initializer_lock = threading.Lock() + @property + def emulator_home(self): + """User-specific emulator configuration directory. + + It corresponds to the environment variable $ANDROID_EMULATOR_HOME. + Configs like advancedFeatures.ini are expected to be under this dir. + """ + return os.path.join(COMMON_CIPD_ROOT, self._config.avd_package.dest_path) + + @property + def emulator_sdk_root(self): + """The path to the SDK installation directory. + + It corresponds to the environment variable $ANDROID_HOME.
+ + To be a valid sdk root, it must contain the subdirectories "platforms" + and "platform-tools". See http://bit.ly/2YAkyFE for context. + + Also, it is expected to have the subdirectories "emulator" and + "system-images". + """ + emulator_sdk_root = os.path.join(COMMON_CIPD_ROOT, + self._config.emulator_package.dest_path) + # Ensure this is a valid sdk root. + required_dirs = [ + os.path.join(emulator_sdk_root, 'platforms'), + os.path.join(emulator_sdk_root, 'platform-tools'), + ] + for d in required_dirs: + if not os.path.exists(d): + os.makedirs(d) + + return emulator_sdk_root + + @property + def emulator_path(self): + """The path to the emulator binary.""" + return os.path.join(self.emulator_sdk_root, 'emulator', 'emulator') + + @property + def qemu_img_path(self): + """The path to the qemu-img binary. + + This is used to rebase the paths in qcow2 images. + """ + return os.path.join(self.emulator_sdk_root, 'emulator', 'qemu-img') + + @property + def mksdcard_path(self): + """The path to the mksdcard binary. + + This is used to create an sdcard image. + """ + return os.path.join(self.emulator_sdk_root, 'emulator', 'mksdcard') + @property def avd_settings(self): + """The AvdSettings in the avd proto file. + + This defines how to configure the AVD at creation. + """ return self._config.avd_settings + @property + def avd_name(self): + """The name of the AVD to create or use.""" + return self._config.avd_name + + @property + def avd_home(self): + """The path that contains the files of one or multiple AVDs.""" + avd_home = os.path.join(self.emulator_home, 'avd') + if not os.path.exists(avd_home): + os.makedirs(avd_home) + + return avd_home + + @property + def _avd_dir(self): + """The path that contains the files of the given AVD.""" + return os.path.join(self.avd_home, '%s.avd' % self.avd_name) + + @property + def _system_image_dir(self): + """The path of the directory that directly contains the system images. + + For example, if the system_image_name is + "system-images;android-33;google_apis;x86_64" + + The _system_image_dir will be: + //system-images/android-33/google_apis/x86_64 + + This is used to rebase the paths in qcow2 images. + """ + return os.path.join(COMMON_CIPD_ROOT, + self._config.system_image_package.dest_path, + *self._config.system_image_name.split(';')) + + @property + def _root_ini_path(self): + """The .ini file of the given AVD.""" + return os.path.join(self.avd_home, '%s.ini' % self.avd_name) + + @property + def _config_ini_path(self): + """The config.ini file under _avd_dir.""" + return os.path.join(self._avd_dir, 'config.ini') + + @property + def _features_ini_path(self): + return os.path.join(self.emulator_home, 'advancedFeatures.ini') + + @property + def xdg_config_dir(self): + """The base directory to store the qt config file. + + This dir should be added to the env variable $XDG_CONFIG_DIRS so that + _qt_config_path can take effect. See https://bit.ly/3HIQRZ3 for context.
+ """ + config_dir = os.path.join(self.emulator_home, '.config') + if not os.path.exists(config_dir): + os.makedirs(config_dir) + + return config_dir + + @property + def _qt_config_path(self): + """The qt config file for emulator.""" + qt_config_dir = os.path.join(self.xdg_config_dir, + 'Android Open Source Project') + if not os.path.exists(qt_config_dir): + os.makedirs(qt_config_dir) + + return os.path.join(qt_config_dir, 'Emulator.conf') + + def HasSnapshot(self, snapshot_name): + """Check if a given snapshot exists or not.""" + snapshot_path = os.path.join(self._avd_dir, 'snapshots', snapshot_name) + return os.path.exists(snapshot_path) + def Create(self, force=False, snapshot=False, keep=False, + additional_apks=None, + privileged_apk_tuples=None, cipd_json_output=None, dry_run=False): """Create an instance of the AVD CIPD package. @@ -197,7 +423,8 @@ def Create(self, - creates the AVD - modifies the AVD's ini files to support running chromium tests in chromium infrastructure - - optionally starts & stops the AVD for snapshotting (default no) + - optionally starts, installs additional apks and/or privileged apks, and + stops the AVD for snapshotting (default no) - By default creates and uploads an instance of the AVD CIPD package (can be turned off by dry_run flag). - optionally deletes the AVD (default yes) @@ -208,51 +435,46 @@ def Create(self, the CIPD package. keep: bool indicating whether to keep the AVD after creating the CIPD package. + additional_apks: a list of strings contains the paths to the APKs. These + APKs will be installed after AVD is started. + privileged_apk_tuples: a list of (apk_path, device_partition) tuples where + |apk_path| is a string containing the path to the APK, and + |device_partition| is a string indicating the system image partition on + device that contains "priv-app" directory, e.g. "/system", "/product". cipd_json_output: string path to pass to `cipd create` via -json-output. dry_run: When set to True, it will skip the CIPD package creation after creating the AVD. """ logging.info('Installing required packages.') - self._InstallCipdPackages(packages=[ - self._config.emulator_package, - self._config.system_image_package, - ]) - - android_avd_home = os.path.join(self._emulator_home, 'avd') + self._InstallCipdPackages(_PACKAGES_CREATION) - if not os.path.exists(android_avd_home): - os.makedirs(android_avd_home) - - avd_manager = _AvdManagerAgent( - avd_home=android_avd_home, sdk_root=self._emulator_sdk_root) + avd_manager = _AvdManagerAgent(avd_home=self.avd_home, + sdk_root=self.emulator_sdk_root) logging.info('Creating AVD.') - avd_manager.Create( - avd_name=self._config.avd_name, - system_image=self._config.system_image_name, - force=force) + avd_manager.Create(avd_name=self.avd_name, + system_image=self._config.system_image_name, + force=force) try: logging.info('Modifying AVD configuration.') # Clear out any previous configuration or state from this AVD. 
- root_ini = os.path.join(android_avd_home, - '%s.ini' % self._config.avd_name) - features_ini = os.path.join(self._emulator_home, 'advancedFeatures.ini') - avd_dir = os.path.join(android_avd_home, '%s.avd' % self._config.avd_name) - config_ini = os.path.join(avd_dir, 'config.ini') - - with ini.update_ini_file(root_ini) as root_ini_contents: - root_ini_contents['path.rel'] = 'avd/%s.avd' % self._config.avd_name + with ini.update_ini_file(self._root_ini_path) as r_ini_contents: + r_ini_contents['path.rel'] = 'avd/%s.avd' % self.avd_name - with ini.update_ini_file(features_ini) as features_ini_contents: + with ini.update_ini_file(self._features_ini_path) as f_ini_contents: # features_ini file will not be refreshed by avdmanager during # creation. So explicitly clear its content to exclude any leftover # from previous creation. - features_ini_contents.clear() - features_ini_contents.update(self.avd_settings.advanced_features) + f_ini_contents.clear() + f_ini_contents.update(self.avd_settings.advanced_features) + + with ini.update_ini_file(self._config_ini_path) as config_ini_contents: + # Update avd_properties first so that they won't override settings + # like screen and ram_size. + config_ini_contents.update(self.avd_settings.avd_properties) - with ini.update_ini_file(config_ini) as config_ini_contents: height = self.avd_settings.screen.height or _DEFAULT_SCREEN_HEIGHT width = self.avd_settings.screen.width or _DEFAULT_SCREEN_WIDTH density = self.avd_settings.screen.density or _DEFAULT_SCREEN_DENSITY @@ -263,33 +485,102 @@ 'hw.lcd.density': density, 'hw.lcd.height': height, 'hw.lcd.width': width, + 'hw.mainKeys': 'no', # Show nav buttons on screen }) if self.avd_settings.ram_size: config_ini_contents['hw.ramSize'] = self.avd_settings.ram_size + config_ini_contents['hw.sdCard'] = 'yes' + if self.avd_settings.sdcard.size: + sdcard_path = os.path.join(self._avd_dir, _SDCARD_NAME) + cmd_helper.RunCmd([ + self.mksdcard_path, + self.avd_settings.sdcard.size, + sdcard_path, + ]) + config_ini_contents['hw.sdCard.path'] = sdcard_path + + if not additional_apks: + additional_apks = [] + for pkg in self._config.additional_apk: + apk_dir = os.path.join(COMMON_CIPD_ROOT, pkg.dest_path) + apk_file = _FindMinSdkFile(apk_dir, self._config.min_sdk) + # Some of these files come from chrome internal, so they may not be + # available to users without internal permissions. + if apk_file and os.path.exists(apk_file): + logging.info('Adding additional apk for install: %s', apk_file) + additional_apks.append(apk_file) + + if not privileged_apk_tuples: + privileged_apk_tuples = [] + for pkg in self._config.privileged_apk: + apk_dir = os.path.join(COMMON_CIPD_ROOT, pkg.dest_path) + apk_file = _FindMinSdkFile(apk_dir, self._config.min_sdk) + # Some of these files come from chrome internal, so they may not be + # available to users without internal permissions. + if apk_file and os.path.exists(apk_file): + logging.info('Adding privileged apk for install: %s', apk_file) + privileged_apk_tuples.append( + (apk_file, self._config.install_privileged_apk_partition)) + # Start & stop the AVD. self._Initialize() - instance = _AvdInstance(self._emulator_path, self._emulator_home, - self._config) + instance = _AvdInstance(self) # Enable debug for snapshot when it is set to True. - debug_tags = 'init,snapshot' if snapshot else None - instance.Start( - read_only=False, snapshot_save=snapshot, debug_tags=debug_tags) + debug_tags = 'time,init,snapshot' if snapshot else None + # Installing privileged apks requires modifying the system + # image.
+ writable_system = bool(privileged_apk_tuples) + instance.Start(ensure_system_settings=False, + read_only=False, + writable_system=writable_system, + gpu_mode=_DEFAULT_GPU_MODE, + debug_tags=debug_tags) + + assert instance.device is not None, '`instance.device` not initialized.' # Android devices with full-disk encryption are encrypted on first boot, # and then get decrypted to continue the boot process (See details in # https://bit.ly/3agmjcM). # Wait for this step to complete since it can take a while for old OSs # like M, otherwise the avd may show an "Encryption Unsuccessful" error. - device_utils.DeviceUtils(instance.serial).WaitUntilFullyBooted( - decrypt=True, timeout=180, retries=0) + instance.device.WaitUntilFullyBooted(decrypt=True, timeout=180, retries=0) + + if additional_apks: + for apk in additional_apks: + instance.device.Install(apk, allow_downgrade=True, reinstall=True) + package_name = apk_helper.GetPackageName(apk) + package_version = instance.device.GetApplicationVersion(package_name) + logging.info('The version for package %r on the device is %r', + package_name, package_version) + + if privileged_apk_tuples: + system_app.InstallPrivilegedApps(instance.device, privileged_apk_tuples) + for apk, _ in privileged_apk_tuples: + package_name = apk_helper.GetPackageName(apk) + package_version = instance.device.GetApplicationVersion(package_name) + logging.info('The version for package %r on the device is %r', + package_name, package_version) + + # Always disable the network to prevent built-in system apps from + # updating themselves, which could tie up the package manager and + # cause shell command timeouts. + logging.info('Disabling the network.') + settings.ConfigureContentSettings(instance.device, + settings.NETWORK_DISABLED_SETTINGS) + + if snapshot: + # Reboot so that changes like disabling the network can take effect. + instance.device.Reboot() + instance.SaveSnapshot() + instance.Stop() # The multiinstance lock file seems to interfere with the emulator's # operation in some circumstances (beyond the obvious -read-only ones), # and there seems to be no mechanism by which it gets closed or deleted. # See https://bit.ly/2pWQTH7 for context.
- multiInstanceLockFile = os.path.join(avd_dir, 'multiinstance.lock') + multiInstanceLockFile = os.path.join(self._avd_dir, 'multiinstance.lock') if os.path.exists(multiInstanceLockFile): os.unlink(multiInstanceLockFile) @@ -297,21 +588,23 @@ 'package': self._config.avd_package.package_name, 'root': - self._emulator_home, + self.emulator_home, 'install_mode': 'copy', 'data': [{ - 'dir': os.path.relpath(avd_dir, self._emulator_home) + 'dir': os.path.relpath(self._avd_dir, self.emulator_home) }, { - 'file': os.path.relpath(root_ini, self._emulator_home) + 'file': + os.path.relpath(self._root_ini_path, self.emulator_home) }, { - 'file': os.path.relpath(features_ini, self._emulator_home) + 'file': + os.path.relpath(self._features_ini_path, self.emulator_home) }], } logging.info('Creating AVD CIPD package.') - logging.debug('ensure file content: %s', - json.dumps(package_def_content, indent=2)) + logging.info('ensure file content: %s', + json.dumps(package_def_content, indent=2)) with tempfile_ext.TemporaryFileName(suffix='.json') as package_def_path: with open(package_def_path, 'w') as package_def_file: @@ -341,16 +634,83 @@ for line in cmd_helper.IterCmdOutputLines(cipd_create_cmd): logging.info(' %s', line) except subprocess.CalledProcessError as e: - raise AvdException( - 'CIPD package creation failed: %s' % str(e), - command=cipd_create_cmd) + # avd.py is executed with python2. + # pylint: disable=W0707 + raise AvdException('CIPD package creation failed: %s' % str(e), + command=cipd_create_cmd) finally: if not keep: logging.info('Deleting AVD.') - avd_manager.Delete(avd_name=self._config.avd_name) + avd_manager.Delete(avd_name=self.avd_name) + + def IsAvailable(self): + """Returns whether the emulator is up-to-date.""" + if not os.path.exists(self._config_ini_path): + return False + + # Skip when no version exists to prevent "IsAvailable()" returning False + # for emulators set up using Create() (rather than Install()). + for cipd_root, pkgs in self._IterCipdPackages(_PACKAGES_RUNTIME, + check_version=False): + stdout = subprocess.run(['cipd', 'installed', '--root', cipd_root], + capture_output=True, + check=False, + encoding='utf8').stdout + # Output looks like: + # Packages: + # name1:version1 + # name2:version2 + installed = [l.strip().split(':', 1) for l in stdout.splitlines()[1:]] + + if any([p.package_name, p.version] not in installed for p in pkgs): + return False + return True + + def Uninstall(self): + """Uninstall all the artifacts associated with the given config. + + Artifacts include: + - CIPD packages specified in the avd config. + - The local AVD created by `Create`, if present. - def Install(self, packages=_ALL_PACKAGES): + """ + # Delete any existing local AVD. This must occur before deleting CIPD + # packages because an AVD needs a system image to be recognized by + # avdmanager. + avd_manager = _AvdManagerAgent(avd_home=self.avd_home, + sdk_root=self.emulator_sdk_root) + if avd_manager.IsAvailable(self.avd_name): + logging.info('Deleting local AVD %s', self.avd_name) + avd_manager.Delete(self.avd_name) + + # Delete installed CIPD packages. + for cipd_root, _ in self._IterCipdPackages(_PACKAGES_ALL, + check_version=False): + logging.info('Uninstalling packages in %s', cipd_root) + if not os.path.exists(cipd_root): + continue + # Create an empty ensure file to remove any installed CIPD packages.
+ ensure_path = os.path.join(cipd_root, '.ensure') + with open(ensure_path, 'w') as ensure_file: + ensure_file.write('$ParanoidMode CheckIntegrity\n\n') + ensure_cmd = [ + 'cipd', + 'ensure', + '-ensure-file', + ensure_path, + '-root', + cipd_root, + ] + try: + for line in cmd_helper.IterCmdOutputLines(ensure_cmd): + logging.info(' %s', line) + except subprocess.CalledProcessError as e: + # avd.py is executed with python2. + # pylint: disable=W0707 + raise AvdException('Failed to uninstall CIPD packages: %s' % str(e), + command=ensure_cmd) + + def Install(self): """Installs the requested CIPD packages and prepares them for use. This includes making files writeable and revising some of the @@ -359,26 +719,84 @@ Returns: None Raises: AvdException on failure to install. """ - self._InstallCipdPackages(packages=packages) + self._InstallCipdPackages(_PACKAGES_RUNTIME) self._MakeWriteable() - self._EditConfigs() + self._UpdateConfigs() + self._RebaseQcow2Images() - def _InstallCipdPackages(self, packages): - pkgs_by_dir = {} - if packages is _ALL_PACKAGES: + def _RebaseQcow2Images(self): + """Rebase the paths in qcow2 images. + + qcow2 files may exist in the avd directory with hard-coded paths to their + backing files, e.g., system.img, vendor.img. Such paths must be rebased + when the avd is moved to a different directory, or it will fail to boot. + """ + for f in _BACKING_FILES: + qcow2_image_path = os.path.join(self._avd_dir, '%s.qcow2' % f) + if not os.path.exists(qcow2_image_path): + continue + backing_file_path = os.path.join(self._system_image_dir, f) + logging.info('Rebasing the qcow2 image %r with the backing file %r', + qcow2_image_path, backing_file_path) + cmd_helper.RunCmd([ + self.qemu_img_path, + 'rebase', + '-u', + '-f', + 'qcow2', + '-b', + # The path to the backing file must be relative to the qcow2 image. + os.path.relpath(backing_file_path, os.path.dirname(qcow2_image_path)), + qcow2_image_path, + ]) + + def _ListPackages(self, packages): + if packages is _PACKAGES_RUNTIME: + packages = [ + self._config.avd_package, + self._config.emulator_package, + self._config.system_image_package, + ] + elif packages is _PACKAGES_CREATION: + packages = [ + self._config.emulator_package, + self._config.system_image_package, + *self._config.privileged_apk, + *self._config.additional_apk, + ] + elif packages is _PACKAGES_ALL: + packages = [ + self._config.avd_package, + self._config.emulator_package, + self._config.system_image_package, + *self._config.privileged_apk, + *self._config.additional_apk, + ] + return packages + + def _IterCipdPackages(self, packages, check_version=True): + """Iterate over a list of CIPD packages, grouped by their CIPD roots. + + Args: + packages: a list of packages from an AVD config. + check_version: If set, raise an Exception when a package has no version.
+ """ + pkgs_by_dir = collections.defaultdict(list) + for pkg in self._ListPackages(packages): + if pkg.version: + pkgs_by_dir[pkg.dest_path].append(pkg) + elif check_version: + raise AvdException('Expecting a version for the package %s' % + pkg.package_name) for pkg_dir, pkgs in pkgs_by_dir.items(): - logging.info('Installing packages in %s', pkg_dir) - cipd_root = os.path.join(constants.DIR_SOURCE_ROOT, pkg_dir) + cipd_root = os.path.join(COMMON_CIPD_ROOT, pkg_dir) + yield cipd_root, pkgs + + def _InstallCipdPackages(self, packages, check_version=True): + for cipd_root, pkgs in self._IterCipdPackages(packages, + check_version=check_version): + logging.info('Installing packages in %s', cipd_root) if not os.path.exists(cipd_root): os.makedirs(cipd_root) ensure_path = os.path.join(cipd_root, '.ensure') @@ -401,14 +819,14 @@ def _InstallCipdPackages(self, packages): for line in cmd_helper.IterCmdOutputLines(ensure_cmd): logging.info(' %s', line) except subprocess.CalledProcessError as e: - raise AvdException( - 'Failed to install CIPD package %s: %s' % (pkg.package_name, - str(e)), - command=ensure_cmd) + # avd.py is executed with python2. + # pylint: disable=W0707 + raise AvdException('Failed to install CIPD packages: %s' % str(e), + command=ensure_cmd) def _MakeWriteable(self): # The emulator requires that some files are writable. - for dirname, _, filenames in os.walk(self._emulator_home): + for dirname, _, filenames in os.walk(self.emulator_home): for f in filenames: path = os.path.join(dirname, f) mode = os.lstat(path).st_mode @@ -416,34 +834,40 @@ def _MakeWriteable(self): mode = mode | stat.S_IWUSR os.chmod(path, mode) - def _EditConfigs(self): - android_avd_home = os.path.join(self._emulator_home, 'avd') - avd_dir = os.path.join(android_avd_home, '%s.avd' % self._config.avd_name) - - config_path = os.path.join(avd_dir, 'config.ini') - if os.path.exists(config_path): - with open(config_path) as config_file: - config_contents = ini.load(config_file) - else: - config_contents = {} - - config_contents['hw.sdCard'] = 'true' - if self.avd_settings.sdcard.size: - sdcard_path = os.path.join(avd_dir, 'cr-sdcard.img') - if not os.path.exists(sdcard_path): - mksdcard_path = os.path.join( - os.path.dirname(self._emulator_path), 'mksdcard') - mksdcard_cmd = [ - mksdcard_path, - self.avd_settings.sdcard.size, - sdcard_path, - ] - cmd_helper.RunCmd(mksdcard_cmd) + def _UpdateConfigs(self): + """Update various properties in config files after installation. - config_contents['hw.sdCard.path'] = sdcard_path - - with open(config_path, 'w') as config_file: - ini.dump(config_contents, config_file) + AVD config files contain some properties which can be different between AVD + creation and installation, e.g. hw.sdCard.path, which is an absolute path. + Update their values so that: + * Emulator instance can be booted correctly. + * The snapshot can be loaded successfully. + """ + logging.info('Updating AVD configurations.') + # Update the absolute avd path in root_ini file + with ini.update_ini_file(self._root_ini_path) as r_ini_contents: + r_ini_contents['path'] = self._avd_dir + + # Update hardware settings. + config_paths = [self._config_ini_path] + # The file hardware.ini within each snapshot need to be updated as well. 
+ hw_ini_glob_pattern = os.path.join(self._avd_dir, 'snapshots', '*', + 'hardware.ini') + config_paths.extend(glob.glob(hw_ini_glob_pattern)) + + properties = {} + # Update hw.sdCard.path if applicable. + sdcard_path = os.path.join(self._avd_dir, _SDCARD_NAME) + if os.path.exists(sdcard_path): + properties['hw.sdCard.path'] = sdcard_path + + for config_path in config_paths: + with ini.update_ini_file(config_path) as config_contents: + config_contents.update(properties) + + # Create the qt config file to disable the adb warning when launched in + # window mode. + with ini.update_ini_file(self._qt_config_path) as config_contents: + config_contents['set'] = {'autoFindAdb': 'false'} def _Initialize(self): if self._initialized: @@ -456,25 +880,17 @@ # Emulator start-up looks for the adb daemon. Make sure it's running. adb_wrapper.AdbWrapper.StartServer() - # Emulator start-up tries to check for the SDK root by looking for - # platforms/ and platform-tools/. Ensure they exist. - # See http://bit.ly/2YAkyFE for context. - required_dirs = [ - os.path.join(self._emulator_sdk_root, 'platforms'), - os.path.join(self._emulator_sdk_root, 'platform-tools'), - ] - for d in required_dirs: - if not os.path.exists(d): - os.makedirs(d) + # Emulator start-up requires a valid sdk root. + assert self.emulator_sdk_root - def CreateInstance(self): + def CreateInstance(self, output_manager=None): """Creates an AVD instance without starting it. Returns: An _AvdInstance. """ self._Initialize() - return _AvdInstance(self._emulator_path, self._emulator_home, self._config) + return _AvdInstance(self, output_manager=output_manager) def StartInstance(self): """Starts an AVD instance. @@ -487,39 +903,68 @@ return instance -class _AvdInstance(object): +class _AvdInstance: """Represents a single running instance of an AVD. This class should only be created directly by AvdConfig.StartInstance, but its other methods can be freely called. """ - def __init__(self, emulator_path, emulator_home, avd_config): + def __init__(self, avd_config, output_manager=None): """Create an _AvdInstance object. Args: - emulator_path: path to the emulator binary. - emulator_home: path to the emulator home directory. - avd_config: AVD config proto. + avd_config: an AvdConfig instance. + output_manager: a pylib.base.output_manager.OutputManager instance. """ self._avd_config = avd_config self._avd_name = avd_config.avd_name - self._emulator_home = emulator_home - self._emulator_path = emulator_path + self._emulator_home = avd_config.emulator_home + self._emulator_path = avd_config.emulator_path self._emulator_proc = None self._emulator_serial = None - self._sink = None + self._emulator_device = None + + self._output_manager = output_manager + self._output_file = None + + self._writable_system = False + self._debug_tags = None def __str__(self): return '%s|%s' % (self._avd_name, (self._emulator_serial or id(self))) def Start(self, + ensure_system_settings=True, read_only=True, - snapshot_save=False, window=False, writable_system=False, - debug_tags=None): - """Starts the emulator running an instance of the given AVD.""" + gpu_mode=_DEFAULT_GPU_MODE, + wipe_data=False, + debug_tags=None, + require_fast_start=False): + """Starts the emulator running an instance of the given AVD. + + Note that when ensure_system_settings is True, the program will wait until + the emulator is fully booted, and then update system settings. + """ + is_slow_start = not require_fast_start + # Force loading the system snapshot if one is detected.
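+ # (Snapshots saved with -writable-system use a separate snapshot name and
+ # must be restored with matching flags; see GetSnapshotName below.)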
+ if self.HasSystemSnapshot(): + if not writable_system: + logging.info('System snapshot found. Set "writable_system=True" ' + 'to load it properly.') + writable_system = True + if read_only: + logging.info('System snapshot found. Set "read_only=False" ' + 'to load it properly.') + read_only = False + elif writable_system: + is_slow_start = True + logging.warning('Emulator will be slow to start, as ' + '"writable_system=True" but system snapshot not found.') + + self._writable_system = writable_system with tempfile_ext.TemporaryFileName() as socket_path, (contextlib.closing( socket.socket(socket.AF_UNIX))) as sock: @@ -531,44 +976,78 @@ '-report-console', 'unix:%s' % socket_path, '-no-boot-anim', - # Set the gpu mode to swiftshader_indirect otherwise the avd may exit - # with the error "change of render" under window mode - '-gpu', - 'swiftshader_indirect', + # Explicitly prevent emulator from auto-saving to snapshot on exit. + '-no-snapshot-save', + # Explicitly set the snapshot name for auto-load. + '-snapshot', + self.GetSnapshotName(), ] + if wipe_data: + emulator_cmd.append('-wipe-data') if read_only: emulator_cmd.append('-read-only') - if not snapshot_save: - emulator_cmd.append('-no-snapshot-save') if writable_system: emulator_cmd.append('-writable-system') + # Note when "--gpu-mode" is set to "host": + # * It needs a valid DISPLAY env, even if "--emulator-window" is false. + # Otherwise it may throw errors like "Failed to initialize backend + # EGL display". See the code in https://bit.ly/3ruiMlB as an example + # to set up the DISPLAY env with xvfb. + # * It will not work under remote sessions like chrome remote desktop. + if gpu_mode: + emulator_cmd.extend(['-gpu', gpu_mode]) if debug_tags: - emulator_cmd.extend(['-debug', debug_tags]) - - emulator_env = {} - if self._emulator_home: - emulator_env['ANDROID_EMULATOR_HOME'] = self._emulator_home + self._debug_tags = set(debug_tags.split(',')) + # Always print timestamp when debug tags are set. + self._debug_tags.add('time') + emulator_cmd.extend(['-debug', ','.join(self._debug_tags)]) + if 'kernel' in self._debug_tags: + # TODO(crbug.com/1404176): newer API levels need "-virtio-console" + # as well to print the kernel log. + emulator_cmd.append('-show-kernel') + + emulator_env = { + # Kill the emulator immediately when it hangs. + 'ANDROID_EMULATOR_WAIT_TIME_BEFORE_KILL': '0', + # Sets the emulator configuration directory. + 'ANDROID_EMULATOR_HOME': self._emulator_home, + } + if 'DISPLAY' in os.environ: + emulator_env['DISPLAY'] = os.environ.get('DISPLAY') if window: - if 'DISPLAY' in os.environ: - emulator_env['DISPLAY'] = os.environ.get('DISPLAY') - else: + if 'DISPLAY' not in emulator_env: raise AvdException('Emulator failed to start: DISPLAY not defined') else: emulator_cmd.append('-no-window') + # Need this for the qt config file to take effect. + xdg_config_dirs = [self._avd_config.xdg_config_dir] + if 'XDG_CONFIG_DIRS' in os.environ: + xdg_config_dirs.append(os.environ.get('XDG_CONFIG_DIRS')) + emulator_env['XDG_CONFIG_DIRS'] = ':'.join(xdg_config_dirs) + sock.listen(1) logging.info('Starting emulator...') logging.info( ' With environments: %s', ' '.join(['%s=%s' % (k, v) for k, v in emulator_env.items()])) logging.info(' With commands: %s', ' '.join(emulator_cmd)) - # TODO(jbudorick): Add support for logging emulator stdout & stderr at - # higher logging levels. # Enable the emulator log when debug_tags is set.
- if not debug_tags: - self._sink = open('/dev/null', 'w') - self._emulator_proc = cmd_helper.Popen( - emulator_cmd, stdout=self._sink, stderr=self._sink, env=emulator_env) + if self._debug_tags: + # Write to an ArchivedFile if output manager is set, otherwise stdout. + if self._output_manager: + self._output_file = self._output_manager.CreateArchivedFile( + 'emulator_%s' % time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()), + 'emulator') + else: + self._output_file = open('/dev/null', 'w') + self._emulator_proc = cmd_helper.Popen(emulator_cmd, + stdout=self._output_file, + stderr=self._output_file, + env=emulator_env) # Waits for the emulator to report its serial as requested via # -report-console. See http://bit.ly/2lK3L18 for more. @@ -580,27 +1059,103 @@ def listen_for_serial(s): try: self._emulator_serial = timeout_retry.Run( - listen_for_serial, timeout=30, retries=0, args=[sock]) + listen_for_serial, + timeout=120 if is_slow_start else 30, + retries=0, + args=[sock]) logging.info('%s started', self._emulator_serial) - except Exception as e: - self.Stop() - raise AvdException('Emulator failed to start: %s' % str(e)) + except Exception: + self.Stop(force=True) + raise + + # Set the system settings in "Start" here instead of setting in "Create" + # because "Create" is used during AVD creation, and we want to avoid an + # extra turn-around when rolling the AVD. + if ensure_system_settings: + assert self.device is not None, '`instance.device` not initialized.' + logging.info('Waiting for device to be fully booted.') + self.device.WaitUntilFullyBooted(timeout=360 if is_slow_start else 90, + retries=0) + logging.info('Device fully booted, verifying system settings.') + _EnsureSystemSettings(self.device) + + def Stop(self, force=False): + """Stops the emulator process. + + When "force" is True, we will call "terminate" on the emulator process, + which is recommended when the emulator is not responding to adb commands. + """ + # Close the output file first in case killing the emulator process goes + # wrong. + if self._output_file: + if self._debug_tags: + if self._output_manager: + self._output_manager.ArchiveArchivedFile(self._output_file, + delete=True) + link = self._output_file.Link() + if link: + logging.critical('Emulator logs saved to %s', link) + else: + self._output_file.close() + self._output_file = None - def Stop(self): - """Stops the emulator process.""" if self._emulator_proc: if self._emulator_proc.poll() is None: - if self._emulator_serial: - device_utils.DeviceUtils(self._emulator_serial).adb.Emu('kill') - else: + if force or not self.device: self._emulator_proc.terminate() + else: + self.device.adb.Emu('kill') self._emulator_proc.wait() self._emulator_proc = None + self._emulator_serial = None + self._emulator_device = None - if self._sink: - self._sink.close() - self._sink = None + def GetSnapshotName(self): + """Return the snapshot name to load/save. + + The emulator has a different snapshot process when the '-writable-system' + flag is set (see https://issuetracker.google.com/issues/135857816#comment8).
+ + """ + if self._writable_system: + return _SYSTEM_SNAPSHOT_NAME + + return _DEFAULT_SNAPSHOT_NAME + + def HasSystemSnapshot(self): + """Check if the instance has the snapshot named _SYSTEM_SNAPSHOT_NAME.""" + return self._avd_config.HasSnapshot(_SYSTEM_SNAPSHOT_NAME) + + def SaveSnapshot(self): + snapshot_name = self.GetSnapshotName() + if self.device: + logging.info('Saving snapshot to %r.', snapshot_name) + self.device.adb.Emu(['avd', 'snapshot', 'save', snapshot_name]) @property def serial(self): return self._emulator_serial + + @property + def device(self): + if not self._emulator_device and self._emulator_serial: + self._emulator_device = device_utils.DeviceUtils(self._emulator_serial) + return self._emulator_device + + +# TODO(crbug.com/1275767): Refactor it to a dict-based approach. +def _EnsureSystemSettings(device): + set_long_press_timeout_cmd = [ + 'settings', 'put', 'secure', 'long_press_timeout', _LONG_PRESS_TIMEOUT + ] + device.RunShellCommand(set_long_press_timeout_cmd, check_return=True) + + # Verify if long_press_timeout is set correctly. + get_long_press_timeout_cmd = [ + 'settings', 'get', 'secure', 'long_press_timeout' + ] + adb_output = device.RunShellCommand(get_long_press_timeout_cmd, + check_return=True) + if _LONG_PRESS_TIMEOUT in adb_output: + logging.info('long_press_timeout set to %r', _LONG_PRESS_TIMEOUT) + else: + logging.warning('long_press_timeout is not set correctly') diff --git a/build/android/pylib/local/emulator/ini.py b/build/android/pylib/local/emulator/ini.py index 8f16c3333b4e..79eb01580d3b 100644 --- a/build/android/pylib/local/emulator/ini.py +++ b/build/android/pylib/local/emulator/ini.py @@ -1,23 +1,60 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -"""Basic .ini encoding and decoding.""" +"""Basic .ini encoding and decoding. + +The basic element in an ini file is the key. Every key is constructed by a name +and a value, delimited by an equals sign (=). + +Keys may be grouped into sections. The secetion name will be a line by itself, +in square brackets ([ and ]). All keys after the section are associated with +that section until another section occurs. + +Keys that are not under any section are considered at the top level. + +Section and key names are case sensitive. +""" + -from __future__ import absolute_import import contextlib import os +def add_key(line, config, strict=True): + key, val = line.split('=', 1) + key = key.strip() + val = val.strip() + if strict and key in config: + raise ValueError('Multiple entries present for key "%s"' % key) + config[key] = val + + def loads(ini_str, strict=True): + """Deserialize int_str to a dict (nested dict when has sections) object. + + Duplicated sections will merge their keys. + + When there are multiple entries for a key, at the top level, or under the + same section: + - If strict is true, ValueError will be raised. + - If strict is false, only the last occurrence will be stored. 
+ """ ret = {} + section = None for line in ini_str.splitlines(): - key, val = line.split('=', 1) - key = key.strip() - val = val.strip() - if strict and key in ret: - raise ValueError('Multiple entries present for key "%s"' % key) - ret[key] = val + # Empty line + if not line: + continue + # Section line + if line[0] == '[' and line[-1] == ']': + section = line[1:-1] + if section not in ret: + ret[section] = {} + # Key line + else: + config = ret if section is None else ret[section] + add_key(line, config, strict=strict) return ret @@ -27,10 +64,20 @@ def load(fp): def dumps(obj): - ret = '' + results = [] + key_str = '' + for k, v in sorted(obj.items()): - ret += '%s = %s\n' % (k, str(v)) - return ret + if isinstance(v, dict): + results.append('[%s]\n' % k + dumps(v)) + else: + key_str += '%s = %s\n' % (k, str(v)) + + # Insert key_str at the first position, before any sections + if key_str: + results.insert(0, key_str) + + return '\n'.join(results) def dump(obj, fp): @@ -46,11 +93,10 @@ def update_ini_file(ini_file_path): Yields: The contents of the file, as a dict """ + ini_contents = {} if os.path.exists(ini_file_path): with open(ini_file_path) as ini_file: ini_contents = load(ini_file) - else: - ini_contents = {} yield ini_contents diff --git a/build/android/pylib/local/emulator/ini_test.py b/build/android/pylib/local/emulator/ini_test.py index 0cf92506dbf8..327d6bf72731 100755 --- a/build/android/pylib/local/emulator/ini_test.py +++ b/build/android/pylib/local/emulator/ini_test.py @@ -1,13 +1,17 @@ -#! /usr/bin/env vpython -# Copyright 2020 The Chromium Authors. All rights reserved. +#! /usr/bin/env vpython3 +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
"""Tests for ini.py.""" -from __future__ import absolute_import + +import os +import sys import textwrap import unittest +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))) from pylib.local.emulator import ini @@ -17,15 +21,35 @@ def testLoadsBasic(self): foo.bar = 1 foo.baz= example bar.bad =/path/to/thing + + [section_1] + foo.bar = 1 + foo.baz= example + + [section_2] + foo.baz= example + bar.bad =/path/to/thing + + [section_1] + bar.bad =/path/to/thing """) expected = { 'foo.bar': '1', 'foo.baz': 'example', 'bar.bad': '/path/to/thing', + 'section_1': { + 'foo.bar': '1', + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + }, + 'section_2': { + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + }, } self.assertEqual(expected, ini.loads(ini_str)) - def testLoadsStrictFailure(self): + def testLoadsDuplicatedKeysStrictFailure(self): ini_str = textwrap.dedent("""\ foo.bar = 1 foo.baz = example @@ -35,17 +59,39 @@ def testLoadsStrictFailure(self): with self.assertRaises(ValueError): ini.loads(ini_str, strict=True) + def testLoadsDuplicatedKeysInSectionStrictFailure(self): + ini_str = textwrap.dedent("""\ + [section_1] + foo.bar = 1 + foo.baz = example + bar.bad = /path/to/thing + foo.bar = duplicate + """) + with self.assertRaises(ValueError): + ini.loads(ini_str, strict=True) + def testLoadsPermissive(self): ini_str = textwrap.dedent("""\ foo.bar = 1 foo.baz = example bar.bad = /path/to/thing foo.bar = duplicate + + [section_1] + foo.bar = 1 + foo.baz = example + bar.bad = /path/to/thing + foo.bar = duplicate """) expected = { 'foo.bar': 'duplicate', 'foo.baz': 'example', 'bar.bad': '/path/to/thing', + 'section_1': { + 'foo.bar': 'duplicate', + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + }, } self.assertEqual(expected, ini.loads(ini_str, strict=False)) @@ -54,13 +100,53 @@ def testDumpsBasic(self): 'foo.bar': '1', 'foo.baz': 'example', 'bar.bad': '/path/to/thing', + 'section_2': { + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + }, + 'section_1': { + 'foo.bar': '1', + 'foo.baz': 'example', + }, } # ini.dumps is expected to dump to string alphabetically - # by key. + # by key and section name. expected = textwrap.dedent("""\ bar.bad = /path/to/thing foo.bar = 1 foo.baz = example + + [section_1] + foo.bar = 1 + foo.baz = example + + [section_2] + bar.bad = /path/to/thing + foo.baz = example + """) + self.assertEqual(expected, ini.dumps(ini_contents)) + + def testDumpsSections(self): + ini_contents = { + 'section_2': { + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + }, + 'section_1': { + 'foo.bar': '1', + 'foo.baz': 'example', + }, + } + # ini.dumps is expected to dump to string alphabetically + # by key first, and then by section and the associated keys + expected = textwrap.dedent("""\ + [section_1] + foo.bar = 1 + foo.baz = example + + [section_2] + bar.bad = /path/to/thing + foo.baz = example """) self.assertEqual(expected, ini.dumps(ini_contents)) diff --git a/build/android/pylib/local/emulator/local_emulator_environment.py b/build/android/pylib/local/emulator/local_emulator_environment.py index 1343d8ca71db..d71a38277fa2 100644 --- a/build/android/pylib/local/emulator/local_emulator_environment.py +++ b/build/android/pylib/local/emulator/local_emulator_environment.py @@ -1,14 +1,13 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import absolute_import
+
import logging
from six.moves import range # pylint: disable=redefined-builtin
from devil import base_error
from devil.android import device_errors
-from devil.android import device_utils
from devil.utils import parallelizer
from devil.utils import reraiser_thread
from devil.utils import timeout_retry
@@ -19,6 +18,9 @@
_MAX_ANDROID_EMULATORS = 16
+# TODO(1262303): Once Telemetry supports Python 3, we can re-add super()
+# without arguments in this script.
+# pylint: disable=super-with-arguments
class LocalEmulatorEnvironment(local_device_environment.LocalDeviceEnvironment):
def __init__(self, args, output_manager, error_func):
@@ -31,6 +33,7 @@ def __init__(self, args, output_manager, error_func):
logging.warning('--emulator-count capped at 16.')
self._emulator_count = min(_MAX_ANDROID_EMULATORS, args.emulator_count)
self._emulator_window = args.emulator_window
+ self._emulator_debug_tags = args.emulator_debug_tags
self._writable_system = ((hasattr(args, 'use_webview_provider')
and args.use_webview_provider)
or (hasattr(args, 'replace_system_package')
@@ -46,36 +49,37 @@ def SetUp(self):
self._avd_config.Install()
emulator_instances = [
- self._avd_config.CreateInstance() for _ in range(self._emulator_count)
+ self._avd_config.CreateInstance(output_manager=self.output_manager)
+ for _ in range(self._emulator_count)
]
- def start_emulator_instance(e):
+ def start_emulator_instance(inst):
+ def is_timeout_error(exc):
+ return isinstance(
+ exc,
+ (device_errors.CommandTimeoutError, reraiser_thread.TimeoutError))
- def impl(e):
+ def impl(inst):
try:
- e.Start(
- window=self._emulator_window,
- writable_system=self._writable_system)
+ inst.Start(window=self._emulator_window,
+ writable_system=self._writable_system,
+ debug_tags=self._emulator_debug_tags,
+ require_fast_start=True)
except avd.AvdException:
logging.exception('Failed to start emulator instance.')
return None
- try:
- device_utils.DeviceUtils(e.serial).WaitUntilFullyBooted()
- except base_error.BaseError:
- e.Stop()
+ except base_error.BaseError as e:
+ # A timeout error usually indicates the emulator is not responding.
+ # In this case, we should stop it forcibly.
+ inst.Stop(force=is_timeout_error(e))
raise
- return e
-
- def retry_on_timeout(exc):
- return (isinstance(exc, device_errors.CommandTimeoutError)
- or isinstance(exc, reraiser_thread.TimeoutError))
+ return inst
- return timeout_retry.Run(
- impl,
- timeout=120 if self._writable_system else 30,
- retries=2,
- args=[e],
- retry_if_func=retry_on_timeout)
+ return timeout_retry.Run(impl,
+ timeout=120 if self._writable_system else 60,
+ retries=2,
+ args=[inst],
+ retry_if_func=is_timeout_error)
parallel_emulators = parallelizer.SyncParallelizer(emulator_instances)
self._emulator_instances = [
@@ -87,7 +91,7 @@ def retry_on_timeout(exc):
if not self._emulator_instances:
raise Exception('Failed to start any instances of the emulator.')
- elif len(self._emulator_instances) < self._emulator_count:
+ if len(self._emulator_instances) < self._emulator_count:
logging.warning(
'Running with fewer emulator instances than requested (%d vs %d)',
len(self._emulator_instances), self._emulator_count)
diff --git a/build/android/pylib/local/emulator/proto/__init__.py b/build/android/pylib/local/emulator/proto/__init__.py
index 4a12e35c9256..401c54b0d9c6 100644
--- a/build/android/pylib/local/emulator/proto/__init__.py
+++ b/build/android/pylib/local/emulator/proto/__init__.py
@@ -1,3 +1,3 @@
-# Copyright 2019 The Chromium Authors.
All rights reserved.
+# Copyright 2019 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/android/pylib/local/emulator/proto/avd.proto b/build/android/pylib/local/emulator/proto/avd.proto
index b06da4900bd9..957897fdf71c 100644
--- a/build/android/pylib/local/emulator/proto/avd.proto
+++ b/build/android/pylib/local/emulator/proto/avd.proto
@@ -1,5 +1,4 @@
-
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -14,7 +13,7 @@ message CIPDPackage {
// Ignored when creating AVD packages.
string version = 2;
// Path into which the package should be installed.
- // src-relative.
+ // relative to pylib.local.emulator.avd.COMMON_CIPD_ROOT.
string dest_path = 3;
}
@@ -53,6 +52,14 @@ message AvdSettings {
// The physical RAM size on the device, in megabytes.
uint32 ram_size = 4;
+
+ // Additional properties for the AVD. The pairs here override the
+ // default values in the given system image.
+ // See https://bit.ly/3052c1V for all the available keys and values.
+ //
+ // Note that screen, sdcard, and ram_size above are ultimately translated
+ // to AVD properties, and they will not be overwritten by values here.
+ map<string, string> avd_properties = 5;
}
message Avd {
@@ -72,4 +79,16 @@ message Avd {
// How to configure the AVD at creation.
AvdSettings avd_settings = 6;
+
+ // Minimum SDK level for the emulator.
+ uint32 min_sdk = 7;
+
+ // The partition in which to install the privileged apk. On version 27
+ // and below this is /system; later versions can use /system, /product,
+ // or /vendor.
+ string install_privileged_apk_partition = 8;
+
+ // Needed for gmscore/phonesky support.
+ repeated CIPDPackage privileged_apk = 9;
+ repeated CIPDPackage additional_apk = 10;
}
diff --git a/build/android/pylib/local/emulator/proto/avd_pb2.py b/build/android/pylib/local/emulator/proto/avd_pb2.py
index 49cc1aa830a5..e43534c2bcec 100644
--- a/build/android/pylib/local/emulator/proto/avd_pb2.py
+++ b/build/android/pylib/local/emulator/proto/avd_pb2.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
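
# Illustrative sketch (not part of the generated file below): constructing the
# Avd message defined in avd.proto above from Python. The field names come
# from the proto; the values and AVD name are hypothetical.
#
#   from pylib.local.emulator.proto import avd_pb2
#
#   spec = avd_pb2.Avd()
#   spec.avd_name = 'android_31_google_apis_x86'
#   spec.min_sdk = 31
#   spec.avd_settings.ram_size = 2048
#   # Overrides a system-image default; note this map cannot override
#   # screen, sdcard, or ram_size, which are applied on top of it.
#   spec.avd_settings.avd_properties['disk.dataPartition.size'] = '4G'
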
-# source: avd.proto +# source: build/android/pylib/local/emulator/proto/avd.proto from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -14,11 +14,12 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='avd.proto', + name='build/android/pylib/local/emulator/proto/avd.proto', package='tools.android.avd.proto', syntax='proto3', serialized_options=None, - serialized_pb=b'\n\tavd.proto\x12\x17tools.android.avd.proto\"G\n\x0b\x43IPDPackage\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x11\n\tdest_path\x18\x03 \x01(\t\"@\n\x0eScreenSettings\x12\x0e\n\x06height\x18\x01 \x01(\r\x12\r\n\x05width\x18\x02 \x01(\r\x12\x0f\n\x07\x64\x65nsity\x18\x03 \x01(\r\"\x1e\n\x0eSdcardSettings\x12\x0c\n\x04size\x18\x01 \x01(\t\"\xa1\x02\n\x0b\x41vdSettings\x12\x37\n\x06screen\x18\x01 \x01(\x0b\x32\'.tools.android.avd.proto.ScreenSettings\x12\x37\n\x06sdcard\x18\x02 \x01(\x0b\x32\'.tools.android.avd.proto.SdcardSettings\x12U\n\x11\x61\x64vanced_features\x18\x03 \x03(\x0b\x32:.tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry\x12\x10\n\x08ram_size\x18\x04 \x01(\r\x1a\x37\n\x15\x41\x64vancedFeaturesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xad\x02\n\x03\x41vd\x12>\n\x10\x65mulator_package\x18\x01 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x42\n\x14system_image_package\x18\x02 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x19\n\x11system_image_name\x18\x03 \x01(\t\x12\x39\n\x0b\x61vd_package\x18\x04 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x10\n\x08\x61vd_name\x18\x05 \x01(\t\x12:\n\x0c\x61vd_settings\x18\x06 \x01(\x0b\x32$.tools.android.avd.proto.AvdSettingsb\x06proto3' + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n2build/android/pylib/local/emulator/proto/avd.proto\x12\x17tools.android.avd.proto\"G\n\x0b\x43IPDPackage\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x11\n\tdest_path\x18\x03 \x01(\t\"@\n\x0eScreenSettings\x12\x0e\n\x06height\x18\x01 \x01(\r\x12\r\n\x05width\x18\x02 \x01(\r\x12\x0f\n\x07\x64\x65nsity\x18\x03 \x01(\r\"\x1e\n\x0eSdcardSettings\x12\x0c\n\x04size\x18\x01 \x01(\t\"\xa8\x03\n\x0b\x41vdSettings\x12\x37\n\x06screen\x18\x01 \x01(\x0b\x32\'.tools.android.avd.proto.ScreenSettings\x12\x37\n\x06sdcard\x18\x02 \x01(\x0b\x32\'.tools.android.avd.proto.SdcardSettings\x12U\n\x11\x61\x64vanced_features\x18\x03 \x03(\x0b\x32:.tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry\x12\x10\n\x08ram_size\x18\x04 \x01(\r\x12O\n\x0e\x61vd_properties\x18\x05 \x03(\x0b\x32\x37.tools.android.avd.proto.AvdSettings.AvdPropertiesEntry\x1a\x37\n\x15\x41\x64vancedFeaturesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x34\n\x12\x41vdPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xe4\x03\n\x03\x41vd\x12>\n\x10\x65mulator_package\x18\x01 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x42\n\x14system_image_package\x18\x02 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x19\n\x11system_image_name\x18\x03 \x01(\t\x12\x39\n\x0b\x61vd_package\x18\x04 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x10\n\x08\x61vd_name\x18\x05 \x01(\t\x12:\n\x0c\x61vd_settings\x18\x06 \x01(\x0b\x32$.tools.android.avd.proto.AvdSettings\x12\x0f\n\x07min_sdk\x18\x07 \x01(\r\x12(\n install_privileged_apk_partition\x18\x08 \x01(\t\x12<\n\x0eprivileged_apk\x18\t 
\x03(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12<\n\x0e\x61\x64\x64itional_apk\x18\n \x03(\x0b\x32$.tools.android.avd.proto.CIPDPackageb\x06proto3' ) @@ -30,6 +31,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='package_name', full_name='tools.android.avd.proto.CIPDPackage.package_name', index=0, @@ -37,21 +39,21 @@ has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='version', full_name='tools.android.avd.proto.CIPDPackage.version', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='dest_path', full_name='tools.android.avd.proto.CIPDPackage.dest_path', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -64,8 +66,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=38, - serialized_end=109, + serialized_start=79, + serialized_end=150, ) @@ -75,6 +77,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='height', full_name='tools.android.avd.proto.ScreenSettings.height', index=0, @@ -82,21 +85,21 @@ has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='width', full_name='tools.android.avd.proto.ScreenSettings.width', index=1, number=2, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='density', full_name='tools.android.avd.proto.ScreenSettings.density', index=2, number=3, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -109,8 +112,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=111, - serialized_end=175, + serialized_start=152, + serialized_end=216, ) @@ -120,6 +123,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='size', 
full_name='tools.android.avd.proto.SdcardSettings.size', index=0, @@ -127,7 +131,7 @@ has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -140,8 +144,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=177, - serialized_end=207, + serialized_start=218, + serialized_end=248, ) @@ -151,6 +155,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='key', full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry.key', index=0, @@ -158,14 +163,14 @@ has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value', full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -178,8 +183,46 @@ extension_ranges=[], oneofs=[ ], - serialized_start=444, - serialized_end=499, + serialized_start=566, + serialized_end=621, +) + +_AVDSETTINGS_AVDPROPERTIESENTRY = _descriptor.Descriptor( + name='AvdPropertiesEntry', + full_name='tools.android.avd.proto.AvdSettings.AvdPropertiesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='tools.android.avd.proto.AvdSettings.AvdPropertiesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', full_name='tools.android.avd.proto.AvdSettings.AvdPropertiesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=623, + serialized_end=675, ) _AVDSETTINGS = _descriptor.Descriptor( @@ -188,6 +231,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='screen', full_name='tools.android.avd.proto.AvdSettings.screen', index=0, @@ -195,32 +239,39 @@ has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='sdcard', full_name='tools.android.avd.proto.AvdSettings.sdcard', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='advanced_features', full_name='tools.android.avd.proto.AvdSettings.advanced_features', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='ram_size', full_name='tools.android.avd.proto.AvdSettings.ram_size', index=3, number=4, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='avd_properties', full_name='tools.android.avd.proto.AvdSettings.avd_properties', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], - nested_types=[_AVDSETTINGS_ADVANCEDFEATURESENTRY, ], + nested_types=[_AVDSETTINGS_ADVANCEDFEATURESENTRY, _AVDSETTINGS_AVDPROPERTIESENTRY, ], enum_types=[ ], serialized_options=None, @@ -229,8 +280,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=210, - serialized_end=499, + serialized_start=251, + serialized_end=675, ) @@ -240,6 +291,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='emulator_package', full_name='tools.android.avd.proto.Avd.emulator_package', index=0, @@ -247,42 +299,70 @@ has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='system_image_package', full_name='tools.android.avd.proto.Avd.system_image_package', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='system_image_name', full_name='tools.android.avd.proto.Avd.system_image_name', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, 
file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='avd_package', full_name='tools.android.avd.proto.Avd.avd_package', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='avd_name', full_name='tools.android.avd.proto.Avd.avd_name', index=4, number=5, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='avd_settings', full_name='tools.android.avd.proto.Avd.avd_settings', index=5, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='min_sdk', full_name='tools.android.avd.proto.Avd.min_sdk', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='install_privileged_apk_partition', full_name='tools.android.avd.proto.Avd.install_privileged_apk_partition', index=7, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='privileged_apk', full_name='tools.android.avd.proto.Avd.privileged_apk', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='additional_apk', full_name='tools.android.avd.proto.Avd.additional_apk', index=9, + number=10, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -295,18 +375,22 @@ extension_ranges=[], oneofs=[ ], - serialized_start=502, - serialized_end=803, + serialized_start=678, + serialized_end=1162, ) _AVDSETTINGS_ADVANCEDFEATURESENTRY.containing_type = _AVDSETTINGS +_AVDSETTINGS_AVDPROPERTIESENTRY.containing_type = _AVDSETTINGS _AVDSETTINGS.fields_by_name['screen'].message_type = _SCREENSETTINGS _AVDSETTINGS.fields_by_name['sdcard'].message_type = _SDCARDSETTINGS _AVDSETTINGS.fields_by_name['advanced_features'].message_type = 
_AVDSETTINGS_ADVANCEDFEATURESENTRY +_AVDSETTINGS.fields_by_name['avd_properties'].message_type = _AVDSETTINGS_AVDPROPERTIESENTRY _AVD.fields_by_name['emulator_package'].message_type = _CIPDPACKAGE _AVD.fields_by_name['system_image_package'].message_type = _CIPDPACKAGE _AVD.fields_by_name['avd_package'].message_type = _CIPDPACKAGE _AVD.fields_by_name['avd_settings'].message_type = _AVDSETTINGS +_AVD.fields_by_name['privileged_apk'].message_type = _CIPDPACKAGE +_AVD.fields_by_name['additional_apk'].message_type = _CIPDPACKAGE DESCRIPTOR.message_types_by_name['CIPDPackage'] = _CIPDPACKAGE DESCRIPTOR.message_types_by_name['ScreenSettings'] = _SCREENSETTINGS DESCRIPTOR.message_types_by_name['SdcardSettings'] = _SDCARDSETTINGS @@ -316,21 +400,21 @@ CIPDPackage = _reflection.GeneratedProtocolMessageType('CIPDPackage', (_message.Message,), { 'DESCRIPTOR' : _CIPDPACKAGE, - '__module__' : 'avd_pb2' + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' # @@protoc_insertion_point(class_scope:tools.android.avd.proto.CIPDPackage) }) _sym_db.RegisterMessage(CIPDPackage) ScreenSettings = _reflection.GeneratedProtocolMessageType('ScreenSettings', (_message.Message,), { 'DESCRIPTOR' : _SCREENSETTINGS, - '__module__' : 'avd_pb2' + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' # @@protoc_insertion_point(class_scope:tools.android.avd.proto.ScreenSettings) }) _sym_db.RegisterMessage(ScreenSettings) SdcardSettings = _reflection.GeneratedProtocolMessageType('SdcardSettings', (_message.Message,), { 'DESCRIPTOR' : _SDCARDSETTINGS, - '__module__' : 'avd_pb2' + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' # @@protoc_insertion_point(class_scope:tools.android.avd.proto.SdcardSettings) }) _sym_db.RegisterMessage(SdcardSettings) @@ -339,24 +423,33 @@ 'AdvancedFeaturesEntry' : _reflection.GeneratedProtocolMessageType('AdvancedFeaturesEntry', (_message.Message,), { 'DESCRIPTOR' : _AVDSETTINGS_ADVANCEDFEATURESENTRY, - '__module__' : 'avd_pb2' + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' # @@protoc_insertion_point(class_scope:tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry) }) , + + 'AvdPropertiesEntry' : _reflection.GeneratedProtocolMessageType('AvdPropertiesEntry', (_message.Message,), { + 'DESCRIPTOR' : _AVDSETTINGS_AVDPROPERTIESENTRY, + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' + # @@protoc_insertion_point(class_scope:tools.android.avd.proto.AvdSettings.AvdPropertiesEntry) + }) + , 'DESCRIPTOR' : _AVDSETTINGS, - '__module__' : 'avd_pb2' + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' # @@protoc_insertion_point(class_scope:tools.android.avd.proto.AvdSettings) }) _sym_db.RegisterMessage(AvdSettings) _sym_db.RegisterMessage(AvdSettings.AdvancedFeaturesEntry) +_sym_db.RegisterMessage(AvdSettings.AvdPropertiesEntry) Avd = _reflection.GeneratedProtocolMessageType('Avd', (_message.Message,), { 'DESCRIPTOR' : _AVD, - '__module__' : 'avd_pb2' + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' # @@protoc_insertion_point(class_scope:tools.android.avd.proto.Avd) }) _sym_db.RegisterMessage(Avd) _AVDSETTINGS_ADVANCEDFEATURESENTRY._options = None +_AVDSETTINGS_AVDPROPERTIESENTRY._options = None # @@protoc_insertion_point(module_scope) diff --git a/build/android/pylib/local/local_test_server_spawner.py b/build/android/pylib/local/local_test_server_spawner.py index f21f1be3f2d9..453d9aa4e87f 100644 --- a/build/android/pylib/local/local_test_server_spawner.py +++ 
b/build/android/pylib/local/local_test_server_spawner.py @@ -1,8 +1,8 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import + import json import time @@ -62,7 +62,7 @@ def Unmap(self, device_port): class LocalTestServerSpawner(test_server.TestServer): def __init__(self, port, device, tool): - super(LocalTestServerSpawner, self).__init__() + super().__init__() self._device = device self._spawning_server = chrome_test_server_spawner.SpawningServer( port, PortForwarderAndroid(device, tool), MAX_TEST_SERVER_INSTANCES) diff --git a/build/android/pylib/local/machine/__init__.py b/build/android/pylib/local/machine/__init__.py index ca3e206fdd8f..68130d5941d9 100644 --- a/build/android/pylib/local/machine/__init__.py +++ b/build/android/pylib/local/machine/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/local/machine/local_machine_environment.py b/build/android/pylib/local/machine/local_machine_environment.py index d198f8970607..d75dc8869d99 100644 --- a/build/android/pylib/local/machine/local_machine_environment.py +++ b/build/android/pylib/local/machine/local_machine_environment.py @@ -1,20 +1,14 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import -import devil_chromium -from pylib import constants from pylib.base import environment class LocalMachineEnvironment(environment.Environment): def __init__(self, _args, output_manager, _error_func): - super(LocalMachineEnvironment, self).__init__(output_manager) - - devil_chromium.Initialize( - output_directory=constants.GetOutDirectory()) + super().__init__(output_manager) #override def SetUp(self): diff --git a/build/android/pylib/local/machine/local_machine_junit_test_run.py b/build/android/pylib/local/machine/local_machine_junit_test_run.py index a64b63b54fc0..a923d6a21ba9 100644 --- a/build/android/pylib/local/machine/local_machine_junit_test_run.py +++ b/build/android/pylib/local/machine/local_machine_junit_test_run.py @@ -1,25 +1,28 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import -import collections import json import logging import multiprocessing import os -import select +import queue +import re import subprocess import sys +import tempfile +import threading +import time import zipfile from six.moves import range # pylint: disable=redefined-builtin +from devil.utils import cmd_helper +from py_utils import tempfile_ext from pylib import constants from pylib.base import base_test_result from pylib.base import test_run from pylib.constants import host_paths from pylib.results import json_results -from py_utils import tempfile_ext # These Test classes are used for running tests and are excluded in the test @@ -42,19 +45,37 @@ # and 6 sec with 2 or more shards. _MIN_CLASSES_PER_SHARD = 8 +# Running the largest test suite with a single shard takes about 22 minutes. 
+_SHARD_TIMEOUT = 30 * 60 -class LocalMachineJunitTestRun(test_run.TestRun): - def __init__(self, env, test_instance): - super(LocalMachineJunitTestRun, self).__init__(env, test_instance) +# RegExp to detect logcat lines, e.g., 'I/AssetManager: not found'. +_LOGCAT_RE = re.compile(r'[A-Z]/[\w\d_-]+:') - #override + +class LocalMachineJunitTestRun(test_run.TestRun): + # override def TestPackage(self): return self._test_instance.suite - #override + # override def SetUp(self): pass + def _GetFilterArgs(self, shard_test_filter=None): + ret = [] + if shard_test_filter: + ret += ['-gtest-filter', ':'.join(shard_test_filter)] + + for test_filter in self._test_instance.test_filters: + ret += ['-gtest-filter', test_filter] + + if self._test_instance.package_filter: + ret += ['-package-filter', self._test_instance.package_filter] + if self._test_instance.runner_filter: + ret += ['-runner-filter', self._test_instance.runner_filter] + + return ret + def _CreateJarArgsList(self, json_result_file_paths, group_test_list, shards): # Creates a list of jar_args. The important thing is each jar_args list # has a different json_results file for writing test results to and that @@ -63,43 +84,41 @@ def _CreateJarArgsList(self, json_result_file_paths, group_test_list, shards): jar_args_list = [['-json-results-file', result_file] for result_file in json_result_file_paths] for index, jar_arg in enumerate(jar_args_list): - if shards > 1: - jar_arg.extend(['-gtest-filter', ':'.join(group_test_list[index])]) - elif self._test_instance.test_filter: - jar_arg.extend(['-gtest-filter', self._test_instance.test_filter]) - - if self._test_instance.package_filter: - jar_arg.extend(['-package-filter', self._test_instance.package_filter]) - if self._test_instance.runner_filter: - jar_arg.extend(['-runner-filter', self._test_instance.runner_filter]) + shard_test_filter = group_test_list[index] if shards > 1 else None + jar_arg += self._GetFilterArgs(shard_test_filter) return jar_args_list - def _CreateJvmArgsList(self): + def _CreateJvmArgsList(self, for_listing=False): # Creates a list of jvm_args (robolectric, code coverage, etc...) 
jvm_args = [
+ '-Drobolectric.dependency.dir=%s' %
+ self._test_instance.robolectric_runtime_deps_dir,
+ '-Ddir.source.root=%s' % constants.DIR_SOURCE_ROOT,
+ # Use locally available sdk jars from 'robolectric.dependency.dir'
+ '-Drobolectric.offline=true',
+ '-Drobolectric.resourcesMode=binary',
+ '-Drobolectric.logging=stdout',
+ '-Djava.library.path=%s' % self._test_instance.native_libs_dir,
]
- if logging.getLogger().isEnabledFor(logging.INFO):
- jvm_args += ['-Drobolectric.logging=stdout']
- if self._test_instance.debug_socket:
+ if self._test_instance.debug_socket and not for_listing:
jvm_args += [
- '-agentlib:jdwp=transport=dt_socket'
- ',server=y,suspend=y,address=%s' % self._test_instance.debug_socket
+ '-Dchromium.jdwp_active=true',
+ ('-agentlib:jdwp=transport=dt_socket'
+ ',server=y,suspend=y,address=%s' % self._test_instance.debug_socket)
]
- if self._test_instance.coverage_dir:
+ if self._test_instance.coverage_dir and not for_listing:
if not os.path.exists(self._test_instance.coverage_dir):
os.makedirs(self._test_instance.coverage_dir)
elif not os.path.isdir(self._test_instance.coverage_dir):
raise Exception('--coverage-dir takes a directory, not file path.')
+ # Jacoco supports concurrent processes using the same output file:
+ # https://github.com/jacoco/jacoco/blob/6cd3f0bd8e348f8fba7bffec5225407151f1cc91/org.jacoco.agent.rt/src/org/jacoco/agent/rt/internal/output/FileOutput.java#L67
+ # So no need to vary the output based on shard number.
+ jacoco_coverage_file = os.path.join(self._test_instance.coverage_dir,
+ '%s.exec' % self._test_instance.suite)
if self._test_instance.coverage_on_the_fly:
- jacoco_coverage_file = os.path.join(
- self._test_instance.coverage_dir,
- '%s.exec' % self._test_instance.suite)
jacoco_agent_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
'third_party', 'jacoco', 'lib',
'jacocoagent.jar')
@@ -109,41 +128,58 @@ def _CreateJvmArgsList(self):
jvm_args.append(
jacoco_args.format(jacoco_agent_path, jacoco_coverage_file))
else:
- jvm_args.append('-Djacoco-agent.destfile=%s' %
- os.path.join(self._test_instance.coverage_dir,
- '%s.exec' % self._test_instance.suite))
+ jvm_args.append('-Djacoco-agent.destfile=%s' % jacoco_coverage_file)
return jvm_args
+
+ @property
+ def _wrapper_path(self):
+ return os.path.join(constants.GetOutDirectory(), 'bin', 'helper',
+ self._test_instance.suite)
+
#override
- def RunTests(self, results):
- wrapper_path = os.path.join(constants.GetOutDirectory(), 'bin', 'helper',
- self._test_instance.suite)
+ def GetTestsForListing(self):
+ with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
+ cmd = [self._wrapper_path, '--list-tests'] + self._GetFilterArgs()
+ jvm_args = self._CreateJvmArgsList(for_listing=True)
+ if jvm_args:
+ cmd += ['--jvm-args', '"%s"' % ' '.join(jvm_args)]
+ AddPropertiesJar([cmd], temp_dir, self._test_instance.resource_apk)
+ lines = subprocess.check_output(cmd, encoding='utf8').splitlines()
+ PREFIX = '#TEST# '
+ prefix_len = len(PREFIX)
+ # Filter log messages other than test names (Robolectric logs to stdout).
+ return sorted(l[prefix_len:] for l in lines if l.startswith(PREFIX))
+
+ # override
+ def RunTests(self, results, raw_logs_fh=None):
# This avoids searching through the classpath jars for test classes,
# which takes about 1-2 seconds.
- # Do not shard when a test filter is present since we do not know at this
- # point which tests will be filtered out.
- if (self._test_instance.shards == 1 or self._test_instance.test_filter - or self._test_instance.suite in _EXCLUDED_SUITES): + if (self._test_instance.shards == 1 + # TODO(crbug.com/1383650): remove this + or self._test_instance.has_literal_filters or + self._test_instance.suite in _EXCLUDED_SUITES): test_classes = [] shards = 1 else: - test_classes = _GetTestClasses(wrapper_path) + test_classes = _GetTestClasses(self._wrapper_path) shards = ChooseNumOfShards(test_classes, self._test_instance.shards) logging.info('Running tests on %d shard(s).', shards) group_test_list = GroupTestsForShard(shards, test_classes) with tempfile_ext.NamedTemporaryDirectory() as temp_dir: - cmd_list = [[wrapper_path] for _ in range(shards)] + cmd_list = [[self._wrapper_path] for _ in range(shards)] json_result_file_paths = [ os.path.join(temp_dir, 'results%d.json' % i) for i in range(shards) ] jar_args_list = self._CreateJarArgsList(json_result_file_paths, group_test_list, shards) - for i in range(shards): - cmd_list[i].extend(['--jar-args', '"%s"' % ' '.join(jar_args_list[i])]) + if jar_args_list: + for i in range(shards): + cmd_list[i].extend( + ['--jar-args', '"%s"' % ' '.join(jar_args_list[i])]) jvm_args = self._CreateJvmArgsList() if jvm_args: @@ -152,12 +188,21 @@ def RunTests(self, results): AddPropertiesJar(cmd_list, temp_dir, self._test_instance.resource_apk) - procs = [ - subprocess.Popen(cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) for cmd in cmd_list - ] - PrintProcessesStdout(procs) + show_logcat = logging.getLogger().isEnabledFor(logging.INFO) + num_omitted_lines = 0 + for line in _RunCommandsAndSerializeOutput(cmd_list): + if raw_logs_fh: + raw_logs_fh.write(line) + if show_logcat or not _LOGCAT_RE.match(line): + sys.stdout.write(line) + else: + num_omitted_lines += 1 + + if num_omitted_lines > 0: + logging.critical('%d log lines omitted.', num_omitted_lines) + sys.stdout.flush() + if raw_logs_fh: + raw_logs_fh.flush() results_list = [] try: @@ -169,15 +214,15 @@ def RunTests(self, results): # In the case of a failure in the JUnit or Robolectric test runner # the output json file may never be written. results_list = [ - base_test_result.BaseTestResult( - 'Test Runner Failure', base_test_result.ResultType.UNKNOWN) + base_test_result.BaseTestResult('Test Runner Failure', + base_test_result.ResultType.UNKNOWN) ] test_run_results = base_test_result.TestRunResults() test_run_results.AddResults(results_list) results.append(test_run_results) - #override + # override def TearDown(self): pass @@ -188,7 +233,14 @@ def AddPropertiesJar(cmd_list, temp_dir, resource_apk): properties_jar_path = os.path.join(temp_dir, 'properties.jar') with zipfile.ZipFile(properties_jar_path, 'w') as z: z.writestr('com/android/tools/test_config.properties', - 'android_resource_apk=%s' % resource_apk) + 'android_resource_apk=%s\n' % resource_apk) + props = [ + 'application = android.app.Application', + 'sdk = 28', + ('shadows = org.chromium.testing.local.' + 'CustomShadowApplicationPackageManager'), + ] + z.writestr('robolectric.properties', '\n'.join(props)) for cmd in cmd_list: cmd.extend(['--classpath', properties_jar_path]) @@ -236,40 +288,122 @@ def GroupTestsForShard(num_of_shards, test_classes): return test_dict -def PrintProcessesStdout(procs): - """Prints the stdout of all the processes. 
+def _DumpJavaStacks(pid):
+ jcmd = os.path.join(constants.JAVA_HOME, 'bin', 'jcmd')
+ cmd = [jcmd, str(pid), 'Thread.print']
+ result = subprocess.run(cmd,
+ check=False,
+ stdout=subprocess.PIPE,
+ encoding='utf8')
+ if result.returncode:
+ return 'Failed to dump stacks\n' + result.stdout
+ return result.stdout
- Buffers the stdout of the processes and prints it when finished.
+
+def _RunCommandsAndSerializeOutput(cmd_list):
+ """Runs multiple commands in parallel and yields serialized output lines.
Args:
- procs: A list of subprocesses.
+ cmd_list: List of commands.
Returns:
N/A
- """
- streams = [p.stdout for p in procs]
- outputs = collections.defaultdict(list)
- first_fd = streams[0].fileno()
-
- while streams:
- rstreams, _, _ = select.select(streams, [], [])
- for stream in rstreams:
- line = stream.readline()
- if line:
- # Print out just one output so user can see work being done rather
- # than waiting for it all at the end.
- if stream.fileno() == first_fd:
- sys.stdout.write(line)
- else:
- outputs[stream.fileno()].append(line)
- else:
- streams.remove(stream) # End of stream.
- for p in procs:
- sys.stdout.write(''.join(outputs[p.stdout.fileno()]))
+ Raises:
+ TimeoutError: If timeout is exceeded.
+ """
+ num_shards = len(cmd_list)
+ assert num_shards > 0
+ procs = []
+ temp_files = []
+ for i, cmd in enumerate(cmd_list):
+ # Shard 0 yields results immediately, the rest write to files.
+ if i == 0:
+ temp_files.append(None) # Placeholder.
+ procs.append(
+ cmd_helper.Popen(
+ cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ ))
+ else:
+ temp_file = tempfile.TemporaryFile(mode='w+t', encoding='utf-8')
+ temp_files.append(temp_file)
+ procs.append(cmd_helper.Popen(
+ cmd,
+ stdout=temp_file,
+ stderr=temp_file,
+ ))
+
+ deadline = time.time() + (_SHARD_TIMEOUT / (num_shards // 2 + 1))
+
+ yield '\n'
+ yield 'Shard 0 output:\n'
+
+ # The following runs from a thread to pump Shard 0 results, allowing live
+ # output while still enforcing the shard timeout.
+ def pump_stream_to_queue(f, q):
+ for line in f:
+ q.put(line)
+ q.put(None)
+
+ shard_0_q = queue.Queue()
+ shard_0_pump = threading.Thread(target=pump_stream_to_queue,
+ args=(procs[0].stdout, shard_0_q))
+ shard_0_pump.start()
+
+ timeout_dumps = {}
+
+ # Print the first process until timeout or completion.
+ while shard_0_pump.is_alive():
+ try:
+ line = shard_0_q.get(timeout=deadline - time.time())
+ if line is None:
+ break
+ yield line
+ except queue.Empty:
+ if time.time() > deadline:
+ break
+
+ # Wait for remaining processes to finish.
+ for i, proc in enumerate(procs):
+ try:
+ proc.wait(timeout=deadline - time.time())
+ except subprocess.TimeoutExpired:
+ timeout_dumps[i] = _DumpJavaStacks(proc.pid)
+ proc.kill()
+
+ # Output any remaining output from a timed-out first shard.
+ shard_0_pump.join()
+ while not shard_0_q.empty():
+ yield shard_0_q.get()
+
+ for i in range(1, num_shards):
+ f = temp_files[i]
+ yield '\n'
+ yield 'Shard %d output:\n' % i
+ f.seek(0)
+ for line in f.readlines():
+ yield line
+ f.close()
+
+ # Output stacks of any timed-out shards.
+ if timeout_dumps:
+ yield '\n'
+ yield ('=' * 80) + '\n'
+ yield '\nOne or more shards timed out.\n'
+ yield ('=' * 80) + '\n'
+ for i, dump in timeout_dumps.items():
+ yield 'Index of timed out shard: %d\n' % i
+ yield 'Thread dump:\n'
+ yield dump
+ yield '\n'
+
+ raise cmd_helper.TimeoutError('Junit shards timed out.')
def _GetTestClasses(file_path):
- test_jar_paths = subprocess.check_output([file_path, '--print-classpath'])
+ test_jar_paths = subprocess.check_output([file_path,
+ '--print-classpath']).decode()
test_jar_paths = test_jar_paths.split(':')
test_classes = []
diff --git a/build/android/pylib/local/machine/local_machine_junit_test_run_test.py b/build/android/pylib/local/machine/local_machine_junit_test_run_test.py
index 2bbe561982c8..d8913b44b54e 100755
--- a/build/android/pylib/local/machine/local_machine_junit_test_run_test.py
+++ b/build/android/pylib/local/machine/local_machine_junit_test_run_test.py
@@ -1,11 +1,11 @@
-#!/usr/bin/env vpython
-# Copyright 2020 The Chromium Authors. All rights reserved.
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=protected-access
-from __future__ import absolute_import
+
import os
import unittest
@@ -20,17 +20,17 @@ def testAddPropertiesJar(self):
apk = 'resource_apk'
cmd_list = []
local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
- self.assertEquals(cmd_list, [])
+ self.assertEqual(cmd_list, [])
cmd_list = [['test1']]
local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
- self.assertEquals(
+ self.assertEqual(
cmd_list[0],
['test1', '--classpath', os.path.join(temp_dir, 'properties.jar')])
cmd_list = [['test1'], ['test2']]
local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
- self.assertEquals(len(cmd_list[0]), 3)
- self.assertEquals(
+ self.assertEqual(len(cmd_list[0]), 3)
+ self.assertEqual(
cmd_list[1],
['test2', '--classpath', os.path.join(temp_dir, 'properties.jar')])
@@ -43,20 +43,20 @@ def testChooseNumOfShards(self, mock_cpu_count):
test_classes = [1] * 50
shards = local_machine_junit_test_run.ChooseNumOfShards(
test_classes, test_shards)
- self.assertEquals(1, shards)
+ self.assertEqual(1, shards)
# Tests setting shards.
test_shards = 4
shards = local_machine_junit_test_run.ChooseNumOfShards(
test_classes, test_shards)
- self.assertEquals(4, shards)
+ self.assertEqual(4, shards)
# Tests using min_class per shards.
test_classes = [1] * 20
test_shards = 8
shards = local_machine_junit_test_run.ChooseNumOfShards(
test_classes, test_shards)
- self.assertEquals(2, shards)
+ self.assertEqual(2, shards)
def testGroupTestsForShard(self):
test_classes = []
diff --git a/build/android/pylib/monkey/monkey_test_instance.py b/build/android/pylib/monkey/monkey_test_instance.py
index 6ab4e370bfca..58c4fa426f1b 100644
--- a/build/android/pylib/monkey/monkey_test_instance.py
+++ b/build/android/pylib/monkey/monkey_test_instance.py
@@ -1,8 +1,7 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
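
# Illustrative sketch (not part of this change): the shard-0 pump pattern used
# by _RunCommandsAndSerializeOutput in local_machine_junit_test_run.py above,
# reduced to a standalone helper. A thread copies the child's stdout into a
# queue so the consumer can enforce a deadline while still yielding lines as
# they arrive. All names here are hypothetical.
#
#   import queue
#   import subprocess
#   import threading
#   import time
#
#   def stream_with_deadline(cmd, deadline):
#     proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
#                             stderr=subprocess.STDOUT, text=True)
#     q = queue.Queue()
#
#     def pump():
#       for line in proc.stdout:
#         q.put(line)
#       q.put(None)  # Sentinel: end of stream.
#
#     threading.Thread(target=pump).start()
#     while True:
#       try:
#         line = q.get(timeout=max(0, deadline - time.time()))
#       except queue.Empty:
#         break  # Deadline hit with no pending output.
#       if line is None:
#         break  # Child exited and the stream is drained.
#       yield line
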
-from __future__ import absolute_import import random from pylib import constants @@ -14,7 +13,7 @@ class MonkeyTestInstance(test_instance.TestInstance): def __init__(self, args, _): - super(MonkeyTestInstance, self).__init__() + super().__init__() self._categories = args.categories self._event_count = args.event_count diff --git a/build/android/pylib/output/__init__.py b/build/android/pylib/output/__init__.py index a22a6ee39a97..b8e1dbd6e92a 100644 --- a/build/android/pylib/output/__init__.py +++ b/build/android/pylib/output/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/output/local_output_manager.py b/build/android/pylib/output/local_output_manager.py index 89becd7f7130..74b4b95b8b56 100644 --- a/build/android/pylib/output/local_output_manager.py +++ b/build/android/pylib/output/local_output_manager.py @@ -1,11 +1,15 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import time import os import shutil -import urllib + +try: + from urllib.parse import quote +except ImportError: + from urllib import quote from pylib.base import output_manager @@ -17,7 +21,7 @@ class LocalOutputManager(output_manager.OutputManager): """ def __init__(self, output_dir): - super(LocalOutputManager, self).__init__() + super().__init__() timestamp = time.strftime( '%Y_%m_%dT%H_%M_%S', time.localtime()) self._output_root = os.path.abspath(os.path.join( @@ -32,12 +36,11 @@ def _CreateArchivedFile(self, out_filename, out_subdir, datatype): class LocalArchivedFile(output_manager.ArchivedFile): def __init__(self, out_filename, out_subdir, datatype, out_root): - super(LocalArchivedFile, self).__init__( - out_filename, out_subdir, datatype) + super().__init__(out_filename, out_subdir, datatype) self._output_path = os.path.join(out_root, out_subdir, out_filename) def _Link(self): - return 'file://%s' % urllib.quote(self._output_path) + return 'file://%s' % quote(self._output_path) def _Archive(self): if not os.path.exists(os.path.dirname(self._output_path)): diff --git a/build/android/pylib/output/local_output_manager_test.py b/build/android/pylib/output/local_output_manager_test.py index 7954350c0322..d2388140b2f6 100755 --- a/build/android/pylib/output/local_output_manager_test.py +++ b/build/android/pylib/output/local_output_manager_test.py @@ -1,5 +1,5 @@ -#! /usr/bin/env vpython -# Copyright 2017 The Chromium Authors. All rights reserved. +#! /usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/output/noop_output_manager.py b/build/android/pylib/output/noop_output_manager.py index d29a7432f96d..acabd30dc18f 100644 --- a/build/android/pylib/output/noop_output_manager.py +++ b/build/android/pylib/output/noop_output_manager.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
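
A note on the local_output_manager.py hunk above: _Link percent-encodes the
archived path before building a file:// URL, so paths containing spaces or
other reserved characters remain valid links. A minimal sketch (using the
Python 3 branch of the import shown above; the path is hypothetical):

  from urllib.parse import quote

  link = 'file://%s' % quote('/tmp/out dir/results.html')
  # -> 'file:///tmp/out%20dir/results.html'
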
@@ -13,9 +13,6 @@ class NoopOutputManager(output_manager.OutputManager): - def __init__(self): - super(NoopOutputManager, self).__init__() - #override def _CreateArchivedFile(self, out_filename, out_subdir, datatype): del out_filename, out_subdir, datatype @@ -25,7 +22,7 @@ def _CreateArchivedFile(self, out_filename, out_subdir, datatype): class NoopArchivedFile(output_manager.ArchivedFile): def __init__(self): - super(NoopArchivedFile, self).__init__(None, None, None) + super().__init__(None, None, None) def Link(self): """NoopArchivedFiles are not retained.""" @@ -36,7 +33,6 @@ def _Link(self): def Archive(self): """NoopArchivedFiles are not retained.""" - pass def _Archive(self): pass diff --git a/build/android/pylib/output/noop_output_manager_test.py b/build/android/pylib/output/noop_output_manager_test.py index 4e470efc0edb..ff4c805d9c56 100755 --- a/build/android/pylib/output/noop_output_manager_test.py +++ b/build/android/pylib/output/noop_output_manager_test.py @@ -1,5 +1,5 @@ -#! /usr/bin/env vpython -# Copyright 2017 The Chromium Authors. All rights reserved. +#! /usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/output/remote_output_manager.py b/build/android/pylib/output/remote_output_manager.py index 9fdb4bf65f92..bf585bbe5b0f 100644 --- a/build/android/pylib/output/remote_output_manager.py +++ b/build/android/pylib/output/remote_output_manager.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -22,7 +22,7 @@ def __init__(self, bucket): Args bucket: Bucket to use when saving to Google Storage. """ - super(RemoteOutputManager, self).__init__() + super().__init__() self._bucket = bucket #override @@ -43,7 +43,7 @@ def _CreateArchivedFile(self, out_filename, out_subdir, datatype): class LogdogArchivedFile(output_manager.ArchivedFile): def __init__(self, out_filename, out_subdir, datatype): - super(LogdogArchivedFile, self).__init__(out_filename, out_subdir, datatype) + super().__init__(out_filename, out_subdir, datatype) self._stream_name = '%s_%s' % (out_subdir, out_filename) def _Link(self): @@ -57,8 +57,7 @@ def _Archive(self): class GoogleStorageArchivedFile(output_manager.ArchivedFile): def __init__(self, out_filename, out_subdir, datatype, bucket): - super(GoogleStorageArchivedFile, self).__init__( - out_filename, out_subdir, datatype) + super().__init__(out_filename, out_subdir, datatype) self._bucket = bucket self._upload_path = None self._content_addressed = None diff --git a/build/android/pylib/output/remote_output_manager_test.py b/build/android/pylib/output/remote_output_manager_test.py index 4c6c081003e7..875451c8bde5 100755 --- a/build/android/pylib/output/remote_output_manager_test.py +++ b/build/android/pylib/output/remote_output_manager_test.py @@ -1,5 +1,5 @@ -#! /usr/bin/env vpython -# Copyright 2017 The Chromium Authors. All rights reserved. +#! /usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
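
Many of the output-manager hunks above are the same mechanical Python 3
cleanup: super(ClassName, self) becomes the zero-argument super(). A minimal
before/after sketch for reference (class names here are hypothetical):

  class Base:
    def __init__(self, tag):
      self.tag = tag

  class Py2Style(Base):
    def __init__(self):
      super(Py2Style, self).__init__('py2')  # Legacy spelling; still valid.

  class Py3Style(Base):
    def __init__(self):
      super().__init__('py3')  # Equivalent, but Python 3 only.
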
diff --git a/build/android/pylib/pexpect.py b/build/android/pylib/pexpect.py index cf59fb0f6d36..6ed6451b1ab3 100644 --- a/build/android/pylib/pexpect.py +++ b/build/android/pylib/pexpect.py @@ -1,4 +1,4 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from __future__ import absolute_import diff --git a/build/android/pylib/restart_adbd.sh b/build/android/pylib/restart_adbd.sh index 393b2ebac045..201628629eb6 100755 --- a/build/android/pylib/restart_adbd.sh +++ b/build/android/pylib/restart_adbd.sh @@ -1,6 +1,6 @@ #!/system/bin/sh -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/results/__init__.py b/build/android/pylib/results/__init__.py index 4d6aabb953d6..d46d7b496679 100644 --- a/build/android/pylib/results/__init__.py +++ b/build/android/pylib/results/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/results/flakiness_dashboard/__init__.py b/build/android/pylib/results/flakiness_dashboard/__init__.py index 4d6aabb953d6..d46d7b496679 100644 --- a/build/android/pylib/results/flakiness_dashboard/__init__.py +++ b/build/android/pylib/results/flakiness_dashboard/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/results/flakiness_dashboard/json_results_generator.py b/build/android/pylib/results/flakiness_dashboard/json_results_generator.py index b2e542bd2a67..3e753e55da02 100644 --- a/build/android/pylib/results/flakiness_dashboard/json_results_generator.py +++ b/build/android/pylib/results/flakiness_dashboard/json_results_generator.py @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -13,7 +13,13 @@ import mimetypes import os import time -import urllib2 +try: + from urllib.request import urlopen, Request + from urllib.error import HTTPError, URLError + from urllib.parse import quote +except ImportError: + from urllib import quote + from urllib2 import urlopen, HTTPError, URLError, Request _log = logging.getLogger(__name__) @@ -44,11 +50,11 @@ def WriteJSON(json_object, file_path, callback=None): def ConvertTrieToFlatPaths(trie, prefix=None): """Flattens the trie of paths, prepending a prefix to each.""" result = {} - for name, data in trie.iteritems(): + for name, data in trie.items(): if prefix: name = prefix + '/' + name - if len(data) and not 'results' in data: + if len(data) != 0 and not 'results' in data: result.update(ConvertTrieToFlatPaths(data, name)) else: result[name] = data @@ -91,11 +97,11 @@ def TestTimingsTrie(individual_test_timings): return trie -class TestResult(object): +class TestResult: """A simple class that represents a single test result.""" # Test modifier constants. 
- (NONE, FAILS, FLAKY, DISABLED) = range(4) + (NONE, FAILS, FLAKY, DISABLED) = list(range(4)) def __init__(self, test, failed=False, elapsed_time=0): self.test_name = test @@ -106,7 +112,7 @@ def __init__(self, test, failed=False, elapsed_time=0): try: test_name = test.split('.')[1] except IndexError: - _log.warn('Invalid test name: %s.', test) + _log.warning('Invalid test name: %s.', test) if test_name.startswith('FAILS_'): self.modifier = self.FAILS @@ -121,7 +127,7 @@ def Fixable(self): return self.failed or self.modifier == self.DISABLED -class JSONResultsGeneratorBase(object): +class JSONResultsGeneratorBase: """A JSON results generator for generic tests.""" MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750 @@ -195,7 +201,7 @@ def __init__(self, builder_name, build_name, build_number, self._results_directory = results_file_base_path self._test_results_map = test_results_map - self._test_results = test_results_map.values() + self._test_results = list(test_results_map.values()) self._svn_repositories = svn_repositories if not self._svn_repositories: @@ -217,7 +223,7 @@ def GenerateJSONOutput(self): WriteJSON(json_object, file_path) def GenerateTimesMSFile(self): - times = TestTimingsTrie(self._test_results_map.values()) + times = TestTimingsTrie(list(self._test_results_map.values())) file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME) WriteJSON(times, file_path) @@ -231,9 +237,10 @@ def GetJSON(self): # If there was an error don't write a results.json # file at all as it would lose all the information on the # bot. - _log.error('Archive directory is inaccessible. Not ' - 'modifying or clobbering the results.json ' - 'file: ' + str(error)) + _log.error( + 'Archive directory is inaccessible. Not ' + 'modifying or clobbering the results.json ' + 'file: %s', error) return None builder_name = self._builder_name @@ -315,7 +322,7 @@ def _GetTestTiming(self, test_name): def _GetFailedTestNames(self): """Returns a set of failed test names.""" - return set([r.test_name for r in self._test_results if r.failed]) + return set(r.test_name for r in self._test_results if r.failed) def _GetModifierChar(self, test_name): """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT, @@ -326,7 +333,7 @@ def _GetModifierChar(self, test_name): return self.__class__.NO_DATA_RESULT test_result = self._test_results_map[test_name] - if test_result.modifier in self.MODIFIER_TO_CHAR.keys(): + if test_result.modifier in list(self.MODIFIER_TO_CHAR.keys()): return self.MODIFIER_TO_CHAR[test_result.modifier] return self.__class__.PASS_RESULT @@ -374,25 +381,21 @@ def _GetArchivedJSONResults(self): return {}, None results_file_url = (self.URL_FOR_TEST_LIST_JSON % - (urllib2.quote(self._test_results_server), - urllib2.quote(self._builder_name), - self.RESULTS_FILENAME, - urllib2.quote(self._test_type), - urllib2.quote(self._master_name))) + (quote(self._test_results_server), + quote(self._builder_name), self.RESULTS_FILENAME, + quote(self._test_type), quote(self._master_name))) - # pylint: disable=redefined-variable-type try: # FIXME: We should talk to the network via a Host object. - results_file = urllib2.urlopen(results_file_url) + results_file = urlopen(results_file_url) old_results = results_file.read() - except urllib2.HTTPError as http_error: + except HTTPError as http_error: # A non-4xx status code means the bot is hosed for some reason # and we can't grab the results.json file off of it. 
if http_error.code < 400 and http_error.code >= 500: error = http_error - except urllib2.URLError as url_error: + except URLError as url_error: error = url_error - # pylint: enable=redefined-variable-type if old_results: # Strip the prefix and suffix so we can get the actual JSON object. @@ -426,7 +429,7 @@ def _InsertFailureSummaries(self, results_for_builder): # Create a test modifiers (FAILS, FLAKY etc) summary dictionary. entry = {} - for test_name in self._test_results_map.iterkeys(): + for test_name in self._test_results_map.keys(): result_char = self._GetModifierChar(test_name) entry[result_char] = entry.get(result_char, 0) + 1 @@ -466,7 +469,7 @@ def _InsertItemRunLengthEncoded(self, item, encoded_results): encoded_results: run-length encoded results. An array of arrays, e.g. [[3,'A'],[1,'Q']] encodes AAAQ. """ - if len(encoded_results) and item == encoded_results[0][1]: + if len(encoded_results) != 0 and item == encoded_results[0][1]: num_results = encoded_results[0][0] if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG: encoded_results[0][0] = num_results + 1 @@ -517,7 +520,7 @@ def _InsertTestTimeAndResult(self, test_name, tests): this_test[segment] = {} this_test = this_test[segment] - if not len(this_test): + if len(this_test) == 0: self._PopulateResultsAndTimesJSON(this_test) if self.RESULTS in this_test: @@ -543,7 +546,7 @@ def _ConvertJSONToCurrentVersion(self, results_json): # version 3->4 if archive_version == 3: - for results in results_json.values(): + for results in list(results_json.values()): self._ConvertTestsToTrie(results) results_json[self.VERSION_KEY] = self.VERSION @@ -554,7 +557,7 @@ def _ConvertTestsToTrie(self, results): test_results = results[self.TESTS] test_results_trie = {} - for test in test_results.iterkeys(): + for test in test_results.keys(): single_test_result = test_results[test] AddPathToTrie(test, single_test_result, test_results_trie) @@ -620,7 +623,7 @@ def _IsResultsAllOfType(self, results, result_type): return len(results) == 1 and results[0][1] == result_type -class _FileUploader(object): +class _FileUploader: def __init__(self, url, timeout_seconds): self._url = url @@ -629,7 +632,7 @@ def __init__(self, url, timeout_seconds): def UploadAsMultipartFormData(self, files, attrs): file_objs = [] for filename, path in files: - with file(path, 'rb') as fp: + with open(path, 'rb') as fp: file_objs.append(('file', filename, fp.read())) # FIXME: We should use the same variable names for the formal and actual @@ -642,12 +645,12 @@ def _UploadData(self, content_type, data): end = start + self._timeout_seconds while time.time() < end: try: - request = urllib2.Request(self._url, data, - {'Content-Type': content_type}) - return urllib2.urlopen(request) - except urllib2.HTTPError as e: - _log.warn("Received HTTP status %s loading \"%s\". " - 'Retrying in 10 seconds...', e.code, e.filename) + request = Request(self._url, data, {'Content-Type': content_type}) + return urlopen(request) + except HTTPError as e: + _log.warning( + 'Received HTTP status %s loading "%s". 
' + 'Retrying in 10 seconds...', e.code, e.filename) time.sleep(10) @@ -678,7 +681,7 @@ def _EncodeMultipartFormData(fields, files): lines.append('--' + BOUNDARY) lines.append('Content-Disposition: form-data; name="%s"' % key) lines.append('') - if isinstance(value, unicode): + if isinstance(value, str): value = value.encode('utf-8') lines.append(value) @@ -688,7 +691,7 @@ def _EncodeMultipartFormData(fields, files): 'filename="%s"' % (key, filename)) lines.append('Content-Type: %s' % _GetMIMEType(filename)) lines.append('') - if isinstance(value, unicode): + if isinstance(value, str): value = value.encode('utf-8') lines.append(value) diff --git a/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py b/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py index d6aee057bfb0..b1d8bfdc91a7 100644 --- a/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py +++ b/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -47,16 +47,13 @@ def tearDown(self): def _TestJSONGeneration(self, passed_tests_list, failed_tests_list): tests_set = set(passed_tests_list) | set(failed_tests_list) - DISABLED_tests = set([t for t in tests_set - if t.startswith('DISABLED_')]) - FLAKY_tests = set([t for t in tests_set - if t.startswith('FLAKY_')]) - FAILS_tests = set([t for t in tests_set - if t.startswith('FAILS_')]) + DISABLED_tests = set(t for t in tests_set if t.startswith('DISABLED_')) + FLAKY_tests = set(t for t in tests_set if t.startswith('FLAKY_')) + FAILS_tests = set(t for t in tests_set if t.startswith('FAILS_')) PASS_tests = tests_set - (DISABLED_tests | FLAKY_tests | FAILS_tests) failed_tests = set(failed_tests_list) - DISABLED_tests - failed_count_map = dict([(t, 1) for t in failed_tests]) + failed_count_map = dict((t, 1) for t in failed_tests) test_timings = {} i = 0 @@ -64,7 +61,7 @@ def _TestJSONGeneration(self, passed_tests_list, failed_tests_list): test_timings[test] = float(self._num_runs * 100 + i) i += 1 - test_results_map = dict() + test_results_map = {} for test in tests_set: test_results_map[test] = json_results_generator.TestResult( test, failed=(test in failed_tests), @@ -76,7 +73,7 @@ def _TestJSONGeneration(self, passed_tests_list, failed_tests_list): None, # don't fetch past json results archive test_results_map) - failed_count_map = dict([(t, 1) for t in failed_tests]) + failed_count_map = dict((t, 1) for t in failed_tests) # Test incremental json results incremental_json = generator.GetJSON() @@ -114,7 +111,7 @@ def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map, if tests_set or DISABLED_count: fixable = {} for fixable_items in buildinfo[JRG.FIXABLE]: - for (result_type, count) in fixable_items.iteritems(): + for (result_type, count) in fixable_items.items(): if result_type in fixable: fixable[result_type] = fixable[result_type] + count else: @@ -138,7 +135,7 @@ def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map, if failed_count_map: tests = buildinfo[JRG.TESTS] - for test_name in failed_count_map.iterkeys(): + for test_name in failed_count_map.keys(): test = self._FindTestInTrie(test_name, tests) failed = 0 diff --git a/build/android/pylib/results/flakiness_dashboard/results_uploader.py 
b/build/android/pylib/results/flakiness_dashboard/results_uploader.py index b68a898b7da5..e3843358b01b 100644 --- a/build/android/pylib/results/flakiness_dashboard/results_uploader.py +++ b/build/android/pylib/results/flakiness_dashboard/results_uploader.py @@ -1,9 +1,9 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Uploads the results to the flakiness dashboard server.""" -# pylint: disable=E1002,R0201 +# pylint: disable=R0201 import logging import os @@ -25,18 +25,17 @@ class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase): """ def __init__(self, builder_name, build_name, build_number, tmp_folder, test_results_map, test_results_server, test_type, master_name): - super(JSONResultsGenerator, self).__init__( - builder_name=builder_name, - build_name=build_name, - build_number=build_number, - results_file_base_path=tmp_folder, - builder_base_url=None, - test_results_map=test_results_map, - svn_repositories=(('webkit', 'third_party/WebKit'), - ('chrome', '.')), - test_results_server=test_results_server, - test_type=test_type, - master_name=master_name) + super().__init__(builder_name=builder_name, + build_name=build_name, + build_number=build_number, + results_file_base_path=tmp_folder, + builder_base_url=None, + test_results_map=test_results_map, + svn_repositories=(('webkit', 'third_party/WebKit'), + ('chrome', '.')), + test_results_server=test_results_server, + test_type=test_type, + master_name=master_name) #override def _GetModifierChar(self, test_name): @@ -61,7 +60,7 @@ def _is_git_directory(in_directory): if os.path.exists(os.path.join(in_directory, '.git')): return True parent = os.path.dirname(in_directory) - if parent == host_paths.DIR_SOURCE_ROOT or parent == in_directory: + if parent in (host_paths.DIR_SOURCE_ROOT, in_directory): return False return _is_git_directory(parent) @@ -70,8 +69,7 @@ def _is_git_directory(in_directory): if not os.path.exists(os.path.join(in_directory, '.svn')): if _is_git_directory(in_directory): return repo_utils.GetGitHeadSHA1(in_directory) - else: - return '' + return '' output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'], cwd=in_directory) try: @@ -82,7 +80,7 @@ def _is_git_directory(in_directory): return '' -class ResultsUploader(object): +class ResultsUploader: """Handles uploading buildbot tests results to the flakiness dashboard.""" def __init__(self, tests_type): self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER') diff --git a/build/android/pylib/results/json_results.py b/build/android/pylib/results/json_results.py index 3397d15ef1a3..6fd64758ae9e 100644 --- a/build/android/pylib/results/json_results.py +++ b/build/android/pylib/results/json_results.py @@ -1,8 +1,7 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import absolute_import import collections import itertools import json @@ -233,6 +232,7 @@ def string_as_status(s): results_list.extend( [base_test_result.BaseTestResult(test, string_as_status(tr['status']), - duration=tr['elapsed_time_ms']) + duration=tr['elapsed_time_ms'], + log=tr.get('output_snippet')) for tr in test_runs]) return results_list diff --git a/build/android/pylib/results/json_results_test.py b/build/android/pylib/results/json_results_test.py index f9d881492968..061aa8cedf1a 100755 --- a/build/android/pylib/results/json_results_test.py +++ b/build/android/pylib/results/json_results_test.py @@ -1,9 +1,8 @@ -#!/usr/bin/env vpython -# Copyright 2014 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import absolute_import import unittest import six @@ -21,18 +20,16 @@ def testGenerateResultsDict_passedResult(self): all_results.AddResult(result) results_dict = json_results.GenerateResultsDict([all_results]) - self.assertEquals( - ['test.package.TestName'], - results_dict['all_tests']) - self.assertEquals(1, len(results_dict['per_iteration_data'])) + self.assertEqual(['test.package.TestName'], results_dict['all_tests']) + self.assertEqual(1, len(results_dict['per_iteration_data'])) iteration_result = results_dict['per_iteration_data'][0] self.assertTrue('test.package.TestName' in iteration_result) - self.assertEquals(1, len(iteration_result['test.package.TestName'])) + self.assertEqual(1, len(iteration_result['test.package.TestName'])) test_iteration_result = iteration_result['test.package.TestName'][0] self.assertTrue('status' in test_iteration_result) - self.assertEquals('SUCCESS', test_iteration_result['status']) + self.assertEqual('SUCCESS', test_iteration_result['status']) def testGenerateResultsDict_skippedResult(self): result = base_test_result.BaseTestResult( @@ -42,18 +39,16 @@ def testGenerateResultsDict_skippedResult(self): all_results.AddResult(result) results_dict = json_results.GenerateResultsDict([all_results]) - self.assertEquals( - ['test.package.TestName'], - results_dict['all_tests']) - self.assertEquals(1, len(results_dict['per_iteration_data'])) + self.assertEqual(['test.package.TestName'], results_dict['all_tests']) + self.assertEqual(1, len(results_dict['per_iteration_data'])) iteration_result = results_dict['per_iteration_data'][0] self.assertTrue('test.package.TestName' in iteration_result) - self.assertEquals(1, len(iteration_result['test.package.TestName'])) + self.assertEqual(1, len(iteration_result['test.package.TestName'])) test_iteration_result = iteration_result['test.package.TestName'][0] self.assertTrue('status' in test_iteration_result) - self.assertEquals('SKIPPED', test_iteration_result['status']) + self.assertEqual('SKIPPED', test_iteration_result['status']) def testGenerateResultsDict_failedResult(self): result = base_test_result.BaseTestResult( @@ -63,18 +58,16 @@ def testGenerateResultsDict_failedResult(self): all_results.AddResult(result) results_dict = json_results.GenerateResultsDict([all_results]) - self.assertEquals( - ['test.package.TestName'], - results_dict['all_tests']) - self.assertEquals(1, len(results_dict['per_iteration_data'])) + self.assertEqual(['test.package.TestName'], results_dict['all_tests']) + self.assertEqual(1, len(results_dict['per_iteration_data'])) iteration_result = results_dict['per_iteration_data'][0] 
self.assertTrue('test.package.TestName' in iteration_result) - self.assertEquals(1, len(iteration_result['test.package.TestName'])) + self.assertEqual(1, len(iteration_result['test.package.TestName'])) test_iteration_result = iteration_result['test.package.TestName'][0] self.assertTrue('status' in test_iteration_result) - self.assertEquals('FAILURE', test_iteration_result['status']) + self.assertEqual('FAILURE', test_iteration_result['status']) def testGenerateResultsDict_duration(self): result = base_test_result.BaseTestResult( @@ -84,18 +77,16 @@ def testGenerateResultsDict_duration(self): all_results.AddResult(result) results_dict = json_results.GenerateResultsDict([all_results]) - self.assertEquals( - ['test.package.TestName'], - results_dict['all_tests']) - self.assertEquals(1, len(results_dict['per_iteration_data'])) + self.assertEqual(['test.package.TestName'], results_dict['all_tests']) + self.assertEqual(1, len(results_dict['per_iteration_data'])) iteration_result = results_dict['per_iteration_data'][0] self.assertTrue('test.package.TestName' in iteration_result) - self.assertEquals(1, len(iteration_result['test.package.TestName'])) + self.assertEqual(1, len(iteration_result['test.package.TestName'])) test_iteration_result = iteration_result['test.package.TestName'][0] self.assertTrue('elapsed_time_ms' in test_iteration_result) - self.assertEquals(123, test_iteration_result['elapsed_time_ms']) + self.assertEqual(123, test_iteration_result['elapsed_time_ms']) def testGenerateResultsDict_multipleResults(self): result1 = base_test_result.BaseTestResult( @@ -108,13 +99,12 @@ def testGenerateResultsDict_multipleResults(self): all_results.AddResult(result2) results_dict = json_results.GenerateResultsDict([all_results]) - self.assertEquals( - ['test.package.TestName1', 'test.package.TestName2'], - results_dict['all_tests']) + self.assertEqual(['test.package.TestName1', 'test.package.TestName2'], + results_dict['all_tests']) self.assertTrue('per_iteration_data' in results_dict) iterations = results_dict['per_iteration_data'] - self.assertEquals(1, len(iterations)) + self.assertEqual(1, len(iterations)) expected_tests = set([ 'test.package.TestName1', @@ -124,11 +114,11 @@ def testGenerateResultsDict_multipleResults(self): for test_name, iteration_result in six.iteritems(iterations[0]): self.assertTrue(test_name in expected_tests) expected_tests.remove(test_name) - self.assertEquals(1, len(iteration_result)) + self.assertEqual(1, len(iteration_result)) test_iteration_result = iteration_result[0] self.assertTrue('status' in test_iteration_result) - self.assertEquals('SUCCESS', test_iteration_result['status']) + self.assertEqual('SUCCESS', test_iteration_result['status']) def testGenerateResultsDict_passOnRetry(self): raw_results = [] @@ -146,28 +136,28 @@ def testGenerateResultsDict_passOnRetry(self): raw_results.append(run_results2) results_dict = json_results.GenerateResultsDict([raw_results]) - self.assertEquals(['test.package.TestName1'], results_dict['all_tests']) + self.assertEqual(['test.package.TestName1'], results_dict['all_tests']) # Check that there's only one iteration. self.assertIn('per_iteration_data', results_dict) iterations = results_dict['per_iteration_data'] - self.assertEquals(1, len(iterations)) + self.assertEqual(1, len(iterations)) # Check that test.package.TestName1 is the only test in the iteration. 
- self.assertEquals(1, len(iterations[0])) + self.assertEqual(1, len(iterations[0])) self.assertIn('test.package.TestName1', iterations[0]) # Check that there are two results for test.package.TestName1. actual_test_results = iterations[0]['test.package.TestName1'] - self.assertEquals(2, len(actual_test_results)) + self.assertEqual(2, len(actual_test_results)) # Check that the first result is a failure. self.assertIn('status', actual_test_results[0]) - self.assertEquals('FAILURE', actual_test_results[0]['status']) + self.assertEqual('FAILURE', actual_test_results[0]['status']) # Check that the second result is a success. self.assertIn('status', actual_test_results[1]) - self.assertEquals('SUCCESS', actual_test_results[1]['status']) + self.assertEqual('SUCCESS', actual_test_results[1]['status']) def testGenerateResultsDict_globalTags(self): raw_results = [] @@ -175,7 +165,7 @@ def testGenerateResultsDict_globalTags(self): results_dict = json_results.GenerateResultsDict( [raw_results], global_tags=global_tags) - self.assertEquals(['UNRELIABLE_RESULTS'], results_dict['global_tags']) + self.assertEqual(['UNRELIABLE_RESULTS'], results_dict['global_tags']) def testGenerateResultsDict_loslessSnippet(self): result = base_test_result.BaseTestResult( @@ -187,22 +177,20 @@ def testGenerateResultsDict_loslessSnippet(self): all_results.AddResult(result) results_dict = json_results.GenerateResultsDict([all_results]) - self.assertEquals( - ['test.package.TestName'], - results_dict['all_tests']) - self.assertEquals(1, len(results_dict['per_iteration_data'])) + self.assertEqual(['test.package.TestName'], results_dict['all_tests']) + self.assertEqual(1, len(results_dict['per_iteration_data'])) iteration_result = results_dict['per_iteration_data'][0] self.assertTrue('test.package.TestName' in iteration_result) - self.assertEquals(1, len(iteration_result['test.package.TestName'])) + self.assertEqual(1, len(iteration_result['test.package.TestName'])) test_iteration_result = iteration_result['test.package.TestName'][0] self.assertTrue('losless_snippet' in test_iteration_result) self.assertTrue(test_iteration_result['losless_snippet']) self.assertTrue('output_snippet' in test_iteration_result) - self.assertEquals(log, test_iteration_result['output_snippet']) + self.assertEqual(log, test_iteration_result['output_snippet']) self.assertTrue('output_snippet_base64' in test_iteration_result) - self.assertEquals('', test_iteration_result['output_snippet_base64']) + self.assertEqual('', test_iteration_result['output_snippet_base64']) def testGenerateJsonTestResultFormatDict_passedResult(self): result = base_test_result.BaseTestResult('test.package.TestName', @@ -213,19 +201,19 @@ def testGenerateJsonTestResultFormatDict_passedResult(self): results_dict = json_results.GenerateJsonTestResultFormatDict([all_results], False) - self.assertEquals(1, len(results_dict['tests'])) - self.assertEquals(1, len(results_dict['tests']['test'])) - self.assertEquals(1, len(results_dict['tests']['test']['package'])) - self.assertEquals( + self.assertEqual(1, len(results_dict['tests'])) + self.assertEqual(1, len(results_dict['tests']['test'])) + self.assertEqual(1, len(results_dict['tests']['test']['package'])) + self.assertEqual( 'PASS', results_dict['tests']['test']['package']['TestName']['expected']) - self.assertEquals( + self.assertEqual( 'PASS', results_dict['tests']['test']['package']['TestName']['actual']) self.assertTrue('FAIL' not in results_dict['num_failures_by_type'] or results_dict['num_failures_by_type']['FAIL'] == 0) 
self.assertIn('PASS', results_dict['num_failures_by_type']) - self.assertEquals(1, results_dict['num_failures_by_type']['PASS']) + self.assertEqual(1, results_dict['num_failures_by_type']['PASS']) def testGenerateJsonTestResultFormatDict_failedResult(self): result = base_test_result.BaseTestResult('test.package.TestName', @@ -236,22 +224,50 @@ def testGenerateJsonTestResultFormatDict_failedResult(self): results_dict = json_results.GenerateJsonTestResultFormatDict([all_results], False) - self.assertEquals(1, len(results_dict['tests'])) - self.assertEquals(1, len(results_dict['tests']['test'])) - self.assertEquals(1, len(results_dict['tests']['test']['package'])) - self.assertEquals( + self.assertEqual(1, len(results_dict['tests'])) + self.assertEqual(1, len(results_dict['tests']['test'])) + self.assertEqual(1, len(results_dict['tests']['test']['package'])) + self.assertEqual( 'PASS', results_dict['tests']['test']['package']['TestName']['expected']) - self.assertEquals( + self.assertEqual( 'FAIL', results_dict['tests']['test']['package']['TestName']['actual']) - self.assertEquals( + self.assertEqual( True, results_dict['tests']['test']['package']['TestName']['is_unexpected']) self.assertTrue('PASS' not in results_dict['num_failures_by_type'] or results_dict['num_failures_by_type']['PASS'] == 0) self.assertIn('FAIL', results_dict['num_failures_by_type']) - self.assertEquals(1, results_dict['num_failures_by_type']['FAIL']) + self.assertEqual(1, results_dict['num_failures_by_type']['FAIL']) + + def testGenerateJsonTestResultFormatDict_skippedResult(self): + result = base_test_result.BaseTestResult('test.package.TestName', + base_test_result.ResultType.SKIP) + + all_results = base_test_result.TestRunResults() + all_results.AddResult(result) + + results_dict = json_results.GenerateJsonTestResultFormatDict([all_results], + False) + self.assertEqual(1, len(results_dict['tests'])) + self.assertEqual(1, len(results_dict['tests']['test'])) + self.assertEqual(1, len(results_dict['tests']['test']['package'])) + self.assertEqual( + 'PASS', + results_dict['tests']['test']['package']['TestName']['expected']) + self.assertEqual( + 'SKIP', results_dict['tests']['test']['package']['TestName']['actual']) + # Should only be set if the test fails. 
+ self.assertNotIn('is_unexpected', + results_dict['tests']['test']['package']['TestName']) + + self.assertTrue('FAIL' not in results_dict['num_failures_by_type'] + or results_dict['num_failures_by_type']['FAIL'] == 0) + self.assertTrue('PASS' not in results_dict['num_failures_by_type'] + or results_dict['num_failures_by_type']['PASS'] == 0) + self.assertIn('SKIP', results_dict['num_failures_by_type']) + self.assertEqual(1, results_dict['num_failures_by_type']['SKIP']) def testGenerateJsonTestResultFormatDict_skippedResult(self): result = base_test_result.BaseTestResult('test.package.TestName', @@ -297,16 +313,16 @@ def testGenerateJsonTestResultFormatDict_failedResultWithRetry(self): results_dict = json_results.GenerateJsonTestResultFormatDict( all_results, False) - self.assertEquals(1, len(results_dict['tests'])) - self.assertEquals(1, len(results_dict['tests']['test'])) - self.assertEquals(1, len(results_dict['tests']['test']['package'])) - self.assertEquals( + self.assertEqual(1, len(results_dict['tests'])) + self.assertEqual(1, len(results_dict['tests']['test'])) + self.assertEqual(1, len(results_dict['tests']['test']['package'])) + self.assertEqual( 'PASS', results_dict['tests']['test']['package']['TestName']['expected']) - self.assertEquals( + self.assertEqual( 'FAIL FAIL', results_dict['tests']['test']['package']['TestName']['actual']) - self.assertEquals( + self.assertEqual( True, results_dict['tests']['test']['package']['TestName']['is_unexpected']) @@ -315,7 +331,7 @@ def testGenerateJsonTestResultFormatDict_failedResultWithRetry(self): # According to the spec: If a test was run more than once, only the first # invocation's result is included in the totals. self.assertIn('FAIL', results_dict['num_failures_by_type']) - self.assertEquals(1, results_dict['num_failures_by_type']['FAIL']) + self.assertEqual(1, results_dict['num_failures_by_type']['FAIL']) if __name__ == '__main__': diff --git a/build/android/pylib/results/presentation/__init__.py b/build/android/pylib/results/presentation/__init__.py index a22a6ee39a97..b8e1dbd6e92a 100644 --- a/build/android/pylib/results/presentation/__init__.py +++ b/build/android/pylib/results/presentation/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/results/presentation/javascript/main_html.js b/build/android/pylib/results/presentation/javascript/main_html.js index 3d94663e33f1..e4bf2cc3fd9b 100644 --- a/build/android/pylib/results/presentation/javascript/main_html.js +++ b/build/android/pylib/results/presentation/javascript/main_html.js @@ -1,4 +1,4 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. +// Copyright 2017 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/pylib/results/presentation/standard_gtest_merge.py b/build/android/pylib/results/presentation/standard_gtest_merge.py index 58a29366c4f2..ab1074e268f3 100755 --- a/build/android/pylib/results/presentation/standard_gtest_merge.py +++ b/build/android/pylib/results/presentation/standard_gtest_merge.py @@ -1,10 +1,9 @@ -#! /usr/bin/env python +#! /usr/bin/env python3 # -# Copyright 2017 The Chromium Authors. All rights reserved. 
+# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import argparse import json @@ -22,6 +21,9 @@ def merge_shard_results(summary_json, jsons_to_merge): with open(summary_json) as f: summary = json.load(f) except (IOError, ValueError): + # TODO(crbug.com/1245494):Re-enable this check after the recipe module + # chromium_swarming can run it with py3 + # pylint: disable=raise-missing-from raise Exception('Summary json cannot be loaded.') # Merge all JSON files together. Keep track of missing shards. @@ -43,17 +45,17 @@ def merge_shard_results(summary_json, jsons_to_merge): # client/swarming.py, which means the state enum is saved in its string # name form, not in the number form. state = result.get('state') - if state == u'BOT_DIED': + if state == 'BOT_DIED': print( 'Shard #%d had a Swarming internal failure' % index, file=sys.stderr) - elif state == u'EXPIRED': + elif state == 'EXPIRED': print('There wasn\'t enough capacity to run your test', file=sys.stderr) - elif state == u'TIMED_OUT': + elif state == 'TIMED_OUT': print('Test runtime exceeded allocated time' 'Either it ran for too long (hard timeout) or it didn\'t produce ' 'I/O for an extended period of time (I/O timeout)', file=sys.stderr) - elif state != u'COMPLETED': + elif state != 'COMPLETED': print('Invalid Swarming task state: %s' % state, file=sys.stderr) json_data, err_msg = load_shard_json(index, result.get('task_id'), @@ -111,7 +113,7 @@ def load_shard_json(index, task_id, jsons_to_merge): if not matching_json_files: print('shard %s test output missing' % index, file=sys.stderr) return (None, 'shard %s test output was missing' % index) - elif len(matching_json_files) > 1: + if len(matching_json_files) > 1: print('duplicate test output for shard %s' % index, file=sys.stderr) return (None, 'shard %s test output was duplicated' % index) @@ -138,7 +140,7 @@ def load_shard_json(index, task_id, jsons_to_merge): def merge_list_of_dicts(left, right): """Merges dicts left[0] with right[0], left[1] with right[1], etc.""" output = [] - for i in xrange(max(len(left), len(right))): + for i in range(max(len(left), len(right))): left_dict = left[i] if i < len(left) else {} right_dict = right[i] if i < len(right) else {} merged_dict = left_dict.copy() @@ -151,7 +153,7 @@ def standard_gtest_merge( output_json, summary_json, jsons_to_merge): output = merge_shard_results(summary_json, jsons_to_merge) - with open(output_json, 'wb') as f: + with open(output_json, 'w') as f: json.dump(output, f) return 0 diff --git a/build/android/pylib/results/presentation/test_results_presentation.py b/build/android/pylib/results/presentation/test_results_presentation.py index 33fae042fca8..9e8b2804156a 100755 --- a/build/android/pylib/results/presentation/test_results_presentation.py +++ b/build/android/pylib/results/presentation/test_results_presentation.py @@ -1,10 +1,10 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
-from __future__ import print_function + import argparse import collections @@ -14,7 +14,12 @@ import tempfile import os import sys -import urllib +try: + from urllib.parse import urlencode + from urllib.request import urlopen +except ImportError: + from urllib import urlencode + from urllib2 import urlopen CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -49,7 +54,7 @@ def pre_cell(data, html_class='center'): } -class LinkTarget(object): +class LinkTarget: # Opens the linked document in a new window or tab. NEW_TAB = '_blank' # Opens the linked document in the same frame as it was clicked. @@ -103,20 +108,22 @@ def action_cell(action, data, html_class): } -def flakiness_dashbord_link(test_name, suite_name): - url_args = urllib.urlencode([ - ('testType', suite_name), - ('tests', test_name)]) - return ('https://test-results.appspot.com/' - 'dashboards/flakiness_dashboard.html#%s' % url_args) +def flakiness_dashbord_link(test_name, suite_name, bucket): + # Assume the bucket will be like "foo-bar-baz", we will take "foo" + # as the test_project. + # Fallback to "chromium" if bucket is not passed, e.g. local_output=True + test_project = bucket.split('-')[0] if bucket else 'chromium' + query = '%s/%s' % (suite_name, test_name) + url_args = urlencode([('t', 'TESTS'), ('q', query), ('tp', test_project)]) + return 'https://ci.chromium.org/ui/search?%s' % url_args -def logs_cell(result, test_name, suite_name): +def logs_cell(result, test_name, suite_name, bucket): """Formats result logs data for processing in jinja template.""" link_list = [] result_link_dict = result.get('links', {}) result_link_dict['flakiness'] = flakiness_dashbord_link( - test_name, suite_name) + test_name, suite_name, bucket) for name, href in sorted(result_link_dict.items()): link_list.append(link( data=name, @@ -124,8 +131,7 @@ def logs_cell(result, test_name, suite_name): target=LinkTarget.NEW_TAB)) if link_list: return links_cell(link_list) - else: - return cell('(no logs)') + return cell('(no logs)') def code_search(test, cs_base_url): @@ -144,7 +150,7 @@ def status_class(status): return status -def create_test_table(results_dict, cs_base_url, suite_name): +def create_test_table(results_dict, cs_base_url, suite_name, bucket): """Format test data for injecting into HTML table.""" header_row = [ @@ -156,7 +162,7 @@ def create_test_table(results_dict, cs_base_url, suite_name): ] test_row_blocks = [] - for test_name, test_results in results_dict.iteritems(): + for test_name, test_results in results_dict.items(): test_runs = [] for index, result in enumerate(test_results): if index == 0: @@ -177,7 +183,8 @@ def create_test_table(results_dict, cs_base_url, suite_name): html_class=('center %s' % status_class(result['status']))), cell(data=result['elapsed_time_ms']), # elapsed_time_ms - logs_cell(result, test_name, suite_name), # logs + logs_cell(result, test_name, suite_name, bucket), + # logs pre_cell(data=result['output_snippet'], # output_snippet html_class='left'), ]) @@ -214,31 +221,25 @@ def create_suite_table(results_dict): cell(data=0), # elapsed_time_ms ] - suite_row_dict = {} - for test_name, test_results in results_dict.iteritems(): + suite_row_dict = collections.defaultdict(lambda: [ + # Note: |suite_name| will be given in the following for loop. + # It is not assigned yet here. 
+ action_cell('showTestsOfOneSuiteOnlyWithNewState("%s")' % suite_name, + suite_name, 'left'), # suite_name + cell(data=0), # number_success_tests + cell(data=0), # number_fail_tests + cell(data=0), # all_tests + cell(data=0), # elapsed_time_ms + ]) + for test_name, test_results in results_dict.items(): # TODO(mikecase): This logic doesn't work if there are multiple test runs. # That is, if 'per_iteration_data' has multiple entries. # Since we only care about the result of the last test run. result = test_results[-1] - suite_name = (test_name.split('#')[0] if '#' in test_name - else test_name.split('.')[0]) - if suite_name in suite_row_dict: - suite_row = suite_row_dict[suite_name] - else: - suite_row = [ - action_cell( - 'showTestsOfOneSuiteOnlyWithNewState("%s")' % suite_name, - suite_name, - 'left' - ), # suite_name - cell(data=0), # number_success_tests - cell(data=0), # number_fail_tests - cell(data=0), # all_tests - cell(data=0), # elapsed_time_ms - ] - - suite_row_dict[suite_name] = suite_row + suite_name = (test_name.split('#')[0] + if '#' in test_name else test_name.split('.')[0]) + suite_row = suite_row_dict[suite_name] suite_row[ALL_COUNT_INDEX]['data'] += 1 footer_row[ALL_COUNT_INDEX]['data'] += 1 @@ -255,7 +256,7 @@ def create_suite_table(results_dict): suite_row[TIME_INDEX]['data'] += result['elapsed_time_ms'] footer_row[TIME_INDEX]['data'] += result['elapsed_time_ms'] - for suite in suite_row_dict.values(): + for suite in list(suite_row_dict.values()): if suite[FAIL_COUNT_INDEX]['data'] > 0: suite[FAIL_COUNT_INDEX]['class'] += ' failure' else: @@ -266,13 +267,12 @@ def create_suite_table(results_dict): else: footer_row[FAIL_COUNT_INDEX]['class'] += ' success' - return (header_row, - [[suite_row] for suite_row in suite_row_dict.values()], + return (header_row, [[suite_row] + for suite_row in list(suite_row_dict.values())], footer_row) def feedback_url(result_details_link): - # pylint: disable=redefined-variable-type url_args = [ ('labels', 'Pri-2,Type-Bug,Restrict-View-Google'), ('summary', 'Result Details Feedback:'), @@ -280,8 +280,7 @@ def feedback_url(result_details_link): ] if result_details_link: url_args.append(('comment', 'Please check out: %s' % result_details_link)) - url_args = urllib.urlencode(url_args) - # pylint: enable=redefined-variable-type + url_args = urlencode(url_args) return 'https://bugs.chromium.org/p/chromium/issues/entry?%s' % url_args @@ -294,7 +293,7 @@ def results_to_html(results_dict, cs_base_url, bucket, test_name, just a local file. 
""" test_rows_header, test_rows = create_test_table( - results_dict, cs_base_url, test_name) + results_dict, cs_base_url, test_name, bucket) suite_rows_header, suite_rows, suite_row_footer = create_suite_table( results_dict) @@ -321,17 +320,16 @@ def results_to_html(results_dict, cs_base_url, bucket, test_name, 'feedback_url': feedback_url(None), }) return (html_render, None, None) - else: - dest = google_storage_helper.unique_name( - '%s_%s_%s' % (test_name, builder_name, build_number)) - result_details_link = google_storage_helper.get_url_link( - dest, '%s/html' % bucket) - html_render = main_template.render( # pylint: disable=no-member - { - 'tb_values': [suite_table_values, test_table_values], - 'feedback_url': feedback_url(result_details_link), - }) - return (html_render, dest, result_details_link) + dest = google_storage_helper.unique_name( + '%s_%s_%s' % (test_name, builder_name, build_number)) + result_details_link = google_storage_helper.get_url_link( + dest, '%s/html' % bucket) + html_render = main_template.render( # pylint: disable=no-member + { + 'tb_values': [suite_table_values, test_table_values], + 'feedback_url': feedback_url(result_details_link), + }) + return (html_render, dest, result_details_link) def result_details(json_path, test_name, cs_base_url, bucket=None, @@ -351,7 +349,7 @@ def result_details(json_path, test_name, cs_base_url, bucket=None, results_dict = collections.defaultdict(list) for testsuite_run in json_object['per_iteration_data']: - for test, test_runs in testsuite_run.iteritems(): + for test, test_runs in testsuite_run.items(): results_dict[test].extend(test_runs) return results_to_html(results_dict, cs_base_url, bucket, test_name, builder_name, build_number, local_output) @@ -378,12 +376,12 @@ def ui_screenshot_set(json_path): ui_screenshots = [] # pylint: disable=too-many-nested-blocks for testsuite_run in json_object['per_iteration_data']: - for _, test_runs in testsuite_run.iteritems(): + for _, test_runs in testsuite_run.items(): for test_run in test_runs: if 'ui screenshot' in test_run['links']: screenshot_link = test_run['links']['ui screenshot'] if screenshot_link.startswith('file:'): - with contextlib.closing(urllib.urlopen(screenshot_link)) as f: + with contextlib.closing(urlopen(screenshot_link)) as f: test_screenshots = json.load(f) else: # Assume anything that isn't a file link is a google storage link @@ -410,7 +408,7 @@ def upload_screenshot_set(json_path, test_name, bucket, builder_name, dest = google_storage_helper.unique_name( 'screenshots_%s_%s_%s' % (test_name, builder_name, build_number), suffix='.json') - with tempfile.NamedTemporaryFile(suffix='.json') as temp_file: + with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as temp_file: temp_file.write(screenshot_set) temp_file.flush() return google_storage_helper.upload( @@ -470,7 +468,7 @@ def main(): with open(args.output_json, 'w') as f: json.dump({}, f) return - elif len(args.positional) != 0 and args.json_file: + if len(args.positional) != 0 and args.json_file: raise parser.error('Exactly one of args.positional and ' 'args.json_file should be given.') @@ -520,8 +518,7 @@ def main(): if ui_screenshot_set_link: ui_catalog_url = 'https://chrome-ui-catalog.appspot.com/' - ui_catalog_query = urllib.urlencode( - {'screenshot_source': ui_screenshot_set_link}) + ui_catalog_query = urlencode({'screenshot_source': ui_screenshot_set_link}) ui_screenshot_link = '%s?%s' % (ui_catalog_url, ui_catalog_query) if args.output_json: diff --git 
a/build/android/pylib/results/presentation/test_results_presentation.pydeps b/build/android/pylib/results/presentation/test_results_presentation.pydeps new file mode 100644 index 000000000000..031e1793bf52 --- /dev/null +++ b/build/android/pylib/results/presentation/test_results_presentation.pydeps @@ -0,0 +1,46 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/pylib/results/presentation --output build/android/pylib/results/presentation/test_results_presentation.pydeps build/android/pylib/results/presentation/test_results_presentation.py +../../../../../third_party/catapult/devil/devil/__init__.py +../../../../../third_party/catapult/devil/devil/android/__init__.py +../../../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../../../third_party/catapult/devil/devil/base_error.py +../../../../../third_party/catapult/devil/devil/constants/__init__.py +../../../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../../../third_party/catapult/devil/devil/utils/__init__.py +../../../../../third_party/catapult/devil/devil/utils/cmd_helper.py +../../../../../third_party/jinja2/__init__.py +../../../../../third_party/jinja2/_identifier.py +../../../../../third_party/jinja2/async_utils.py +../../../../../third_party/jinja2/bccache.py +../../../../../third_party/jinja2/compiler.py +../../../../../third_party/jinja2/defaults.py +../../../../../third_party/jinja2/environment.py +../../../../../third_party/jinja2/exceptions.py +../../../../../third_party/jinja2/filters.py +../../../../../third_party/jinja2/idtracking.py +../../../../../third_party/jinja2/lexer.py +../../../../../third_party/jinja2/loaders.py +../../../../../third_party/jinja2/nodes.py +../../../../../third_party/jinja2/optimizer.py +../../../../../third_party/jinja2/parser.py +../../../../../third_party/jinja2/runtime.py +../../../../../third_party/jinja2/tests.py +../../../../../third_party/jinja2/utils.py +../../../../../third_party/jinja2/visitor.py +../../../../../third_party/markupsafe/__init__.py +../../../../../third_party/markupsafe/_compat.py +../../../../../third_party/markupsafe/_native.py +../../__init__.py +../../constants/__init__.py +../../constants/host_paths.py +../../utils/__init__.py +../../utils/decorators.py +../../utils/google_storage_helper.py +../__init__.py +__init__.py +standard_gtest_merge.py +test_results_presentation.py diff --git a/build/android/pylib/results/report_results.py b/build/android/pylib/results/report_results.py index 66b0717dcef3..623c8cd29b0c 100644 --- a/build/android/pylib/results/report_results.py +++ b/build/android/pylib/results/report_results.py @@ -1,10 +1,9 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
"""Module containing utility functions for reporting results.""" -from __future__ import print_function from __future__ import absolute_import import logging @@ -109,17 +108,17 @@ def LogFull(results, test_type, test_package, annotation=None, logging.critical('*' * 80) logging.critical('Summary') logging.critical('*' * 80) - for line in results.GetGtestForm().splitlines(): - color = black_on_white - if 'FAILED' in line: - # Red on white, dim. - color = (logging_utils.BACK.WHITE, logging_utils.FORE.RED, - logging_utils.STYLE.DIM) - elif 'PASSED' in line: - # Green on white, dim. - color = (logging_utils.BACK.WHITE, logging_utils.FORE.GREEN, - logging_utils.STYLE.DIM) - with logging_utils.OverrideColor(logging.CRITICAL, color): + # Assign uniform color, depending on presence of 'FAILED' over lines. + if any('FAILED' in line for line in results.GetGtestForm().splitlines()): + # Red on white, dim. + color = (logging_utils.BACK.WHITE, logging_utils.FORE.RED, + logging_utils.STYLE.DIM) + else: + # Green on white, dim. + color = (logging_utils.BACK.WHITE, logging_utils.FORE.GREEN, + logging_utils.STYLE.DIM) + with logging_utils.OverrideColor(logging.CRITICAL, color): + for line in results.GetGtestForm().splitlines(): logging.critical(line) logging.critical('*' * 80) diff --git a/build/android/pylib/symbols/apk_lib_dump.py b/build/android/pylib/symbols/apk_lib_dump.py deleted file mode 100755 index ba870266976c..000000000000 --- a/build/android/pylib/symbols/apk_lib_dump.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Dump shared library information from an APK file. - -This script is used to dump which *uncompressed* native shared libraries an -APK contains, as well as their position within the file. This is mostly useful -to diagnose logcat and tombstone symbolization issues when the libraries are -loaded directly from the APK at runtime. - -The default format will print one line per uncompressed shared library with the -following format: - - 0x 0x 0x - -The --format=python option can be used to dump the same information that is -easy to use in a Python script, e.g. 
with a line like: - - (0x, 0x, 0x, ), -""" - -from __future__ import print_function - -import argparse -import os -import sys - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..')) - -from pylib.symbols import apk_native_libs - -def main(): - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - - parser.add_argument('apk', help='Input APK file path.') - - parser.add_argument('--format', help='Select output format', - default='default', choices=['default', 'python']) - - args = parser.parse_args() - - apk_reader = apk_native_libs.ApkReader(args.apk) - lib_map = apk_native_libs.ApkNativeLibraries(apk_reader) - for lib_path, file_offset, file_size in lib_map.GetDumpList(): - if args.format == 'python': - print('(0x%08x, 0x%08x, 0x%08x, \'%s\'),' % - (file_offset, file_offset + file_size, file_size, lib_path)) - else: - print('0x%08x 0x%08x 0x%08x %s' % (file_offset, file_offset + file_size, - file_size, lib_path)) - - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/android/pylib/symbols/apk_native_libs.py b/build/android/pylib/symbols/apk_native_libs.py deleted file mode 100644 index c4af2029061e..000000000000 --- a/build/android/pylib/symbols/apk_native_libs.py +++ /dev/null @@ -1,419 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import logging -import os -import re -import struct -import zipfile - -# The default zipfile python module cannot open APKs properly, but this -# fixes it. Note that simply importing this file is sufficient to -# ensure that zip works correctly for all other modules. See: -# http://bugs.python.org/issue14315 -# https://hg.python.org/cpython/rev/6dd5e9556a60#l2.8 -def _PatchZipFile(): - # pylint: disable=protected-access - oldDecodeExtra = zipfile.ZipInfo._decodeExtra - def decodeExtra(self): - try: - oldDecodeExtra(self) - except struct.error: - pass - zipfile.ZipInfo._decodeExtra = decodeExtra -_PatchZipFile() - - -class ApkZipInfo(object): - """Models a single file entry from an ApkReader. - - This is very similar to the zipfile.ZipInfo class. It provides a few - properties describing the entry: - - filename (same as ZipInfo.filename) - - file_size (same as ZipInfo.file_size) - - compress_size (same as ZipInfo.file_size) - - file_offset (note: not provided by ZipInfo) - - And a few useful methods: IsCompressed() and IsElfFile(). - - Entries can be created by using ApkReader() methods. - """ - def __init__(self, zip_file, zip_info): - """Construct instance. Do not call this directly. 
Use ApkReader methods.""" - self._file = zip_file - self._info = zip_info - self._file_offset = None - - @property - def filename(self): - """Entry's file path within APK.""" - return self._info.filename - - @property - def file_size(self): - """Entry's extracted file size in bytes.""" - return self._info.file_size - - @property - def compress_size(self): - """Entry' s compressed file size in bytes.""" - return self._info.compress_size - - @property - def file_offset(self): - """Entry's starting file offset in the APK.""" - if self._file_offset is None: - self._file_offset = self._ZipFileOffsetFromLocalHeader( - self._file.fp, self._info.header_offset) - return self._file_offset - - def __repr__(self): - """Convert to string for debugging.""" - return 'ApkZipInfo["%s",size=0x%x,compressed=0x%x,offset=0x%x]' % ( - self.filename, self.file_size, self.compress_size, self.file_offset) - - def IsCompressed(self): - """Returns True iff the entry is compressed.""" - return self._info.compress_type != zipfile.ZIP_STORED - - def IsElfFile(self): - """Returns True iff the entry is an ELF file.""" - with self._file.open(self._info, 'r') as f: - return f.read(4) == '\x7fELF' - - @staticmethod - def _ZipFileOffsetFromLocalHeader(fd, local_header_offset): - """Return a file's start offset from its zip archive local header. - - Args: - fd: Input file object. - local_header_offset: Local header offset (from its ZipInfo entry). - Returns: - file start offset. - """ - FILE_NAME_LEN_OFFSET = 26 - FILE_NAME_OFFSET = 30 - fd.seek(local_header_offset + FILE_NAME_LEN_OFFSET) - file_name_len = struct.unpack('H', fd.read(2))[0] - extra_field_len = struct.unpack('H', fd.read(2))[0] - file_offset = (local_header_offset + FILE_NAME_OFFSET + - file_name_len + extra_field_len) - return file_offset - - -class ApkReader(object): - """A convenience class used to read the content of APK files. - - Its design is very similar to the one from zipfile.ZipFile, except - that its returns ApkZipInfo entries which provide a |file_offset| - property that can be used to know where a given file is located inside - the archive. - - It is also easy to mock for unit-testing (see MockApkReader in - apk_utils_unittest.py) without creating any files on disk. - - Usage is the following: - - Create an instance using a with statement (for proper unit-testing). - - Call ListEntries() to list all entries in the archive. This returns - a list of ApkZipInfo entries. - - Or call FindEntry() corresponding to a given path within the archive. - - For example: - with ApkReader(input_apk_path) as reader: - info = reader.FindEntry('lib/armeabi-v7a/libfoo.so') - if info.IsCompressed() or not info.IsElfFile(): - raise Exception('Invalid library path") - - The ApkZipInfo can be used to inspect the entry's metadata, or read its - content with the ReadAll() method. See its documentation for all details. 
- """ - def __init__(self, apk_path): - """Initialize instance.""" - self._zip_file = zipfile.ZipFile(apk_path, 'r') - self._path = apk_path - - def __enter__(self): - """Python context manager entry.""" - return self - - def __exit__(self, *kwargs): - """Python context manager exit.""" - self.Close() - - @property - def path(self): - """The corresponding input APK path.""" - return self._path - - def Close(self): - """Close the reader (and underlying ZipFile instance).""" - self._zip_file.close() - - def ListEntries(self): - """Return a list of ApkZipInfo entries for this APK.""" - result = [] - for info in self._zip_file.infolist(): - result.append(ApkZipInfo(self._zip_file, info)) - return result - - def FindEntry(self, file_path): - """Return an ApkZipInfo instance for a given archive file path. - - Args: - file_path: zip file path. - Return: - A new ApkZipInfo entry on success. - Raises: - KeyError on failure (entry not found). - """ - info = self._zip_file.getinfo(file_path) - return ApkZipInfo(self._zip_file, info) - - - -class ApkNativeLibraries(object): - """A class for the list of uncompressed shared libraries inside an APK. - - Create a new instance by passing the path to an input APK, then use - the FindLibraryByOffset() method to find the native shared library path - corresponding to a given file offset. - - GetAbiList() and GetLibrariesList() can also be used to inspect - the state of the instance. - """ - def __init__(self, apk_reader): - """Initialize instance. - - Args: - apk_reader: An ApkReader instance corresponding to the input APK. - """ - self._native_libs = [] - for entry in apk_reader.ListEntries(): - # Chromium uses so-called 'placeholder' native shared libraries - # that have a size of 0, and are only used to deal with bugs in - # older Android system releases (they are never loaded and cannot - # appear in stack traces). Ignore these here to avoid generating - # confusing results. - if entry.file_size == 0: - continue - - # Only uncompressed libraries can appear in stack traces. - if entry.IsCompressed(): - continue - - # Only consider files within lib/ and with a filename ending with .so - # at the moment. NOTE: Do not require a 'lib' prefix, since that would - # prevent finding the 'crazy.libXXX.so' libraries used by Chromium. - if (not entry.filename.startswith('lib/') or - not entry.filename.endswith('.so')): - continue - - lib_path = entry.filename - - self._native_libs.append( - (lib_path, entry.file_offset, entry.file_offset + entry.file_size)) - - def IsEmpty(self): - """Return true iff the list is empty.""" - return not bool(self._native_libs) - - def GetLibraries(self): - """Return the list of all library paths in this instance.""" - return sorted([x[0] for x in self._native_libs]) - - def GetDumpList(self): - """Retrieve full library map. - - Returns: - A list of (lib_path, file_offset, file_size) tuples, sorted - in increasing |file_offset| values. - """ - result = [] - for entry in self._native_libs: - lib_path, file_start, file_end = entry - result.append((lib_path, file_start, file_end - file_start)) - - return sorted(result, lambda x, y: cmp(x[1], y[1])) - - def FindLibraryByOffset(self, file_offset): - """Find the native library at a given file offset. - - Args: - file_offset: File offset within the original APK. - Returns: - Returns a (lib_path, lib_offset) tuple on success, or (None, 0) - on failure. Note that lib_path will omit the 'lib/$ABI/' prefix, - lib_offset is the adjustment of file_offset within the library. 
- """ - for lib_path, start_offset, end_offset in self._native_libs: - if file_offset >= start_offset and file_offset < end_offset: - return (lib_path, file_offset - start_offset) - - return (None, 0) - - -class ApkLibraryPathTranslator(object): - """Translates APK file paths + byte offsets into library path + offset. - - The purpose of this class is to translate a native shared library path - that points to an APK into a new device-specific path that points to a - native shared library, as if it was installed there. E.g.: - - ('/data/data/com.example.app-1/base.apk', 0x123be00) - - would be translated into: - - ('/data/data/com.example.app-1/base.apk!lib/libfoo.so', 0x3be00) - - If the original APK (installed as base.apk) contains an uncompressed shared - library under lib/armeabi-v7a/libfoo.so at offset 0x120000. - - Note that the virtual device path after the ! doesn't necessarily match - the path inside the .apk. This doesn't really matter for the rest of - the symbolization functions since only the file's base name can be used - to find the corresponding file on the host. - - Usage is the following: - - 1/ Create new instance. - - 2/ Call AddHostApk() one or several times to add the host path - of an APK, its package name, and device-installed named. - - 3/ Call TranslatePath() to translate a (path, offset) tuple corresponding - to an on-device APK, into the corresponding virtual device library - path and offset. - """ - - # Depending on the version of the system, a non-system APK might be installed - # on a path that looks like the following: - # - # * /data/..../-.apk, where is used to - # distinguish several versions of the APK during package updates. - # - # * /data/..../-/base.apk, where is a - # string of random ASCII characters following the dash after the - # package name. This serves as a way to distinguish the installation - # paths during package update, and randomize its final location - # (to prevent apps from hard-coding the paths to other apps). - # - # Note that the 'base.apk' name comes from the system. - # - # * /data/.../-/.apk, where - # is the same as above, and is the name of am app bundle - # split APK. - # - # System APKs are installed on paths that look like /system/app/Foo.apk - # but this class ignores them intentionally. - - # Compiler regular expression for the first format above. - _RE_APK_PATH_1 = re.compile( - r'/data/.*/(?P[A-Za-z0-9_.]+)-(?P[0-9]+)\.apk') - - # Compiled regular expression for the second and third formats above. - _RE_APK_PATH_2 = re.compile( - r'/data/.*/(?P[A-Za-z0-9_.]+)-(?P[^/]+)/' + - r'(?P.+\.apk)') - - def __init__(self): - """Initialize instance. Call AddHostApk() to add host apk file paths.""" - self._path_map = {} # Maps (package_name, apk_name) to host-side APK path. - self._libs_map = {} # Maps APK host path to ApkNativeLibrariesMap instance. - - def AddHostApk(self, package_name, native_libs, device_apk_name=None): - """Add a file path to the host APK search list. - - Args: - package_name: Corresponding apk package name. - native_libs: ApkNativeLibraries instance for the corresponding APK. - device_apk_name: Optional expected name of the installed APK on the - device. This is only useful when symbolizing app bundle that run on - Android L+. I.e. it will be ignored in other cases. - """ - if native_libs.IsEmpty(): - logging.debug('Ignoring host APK without any uncompressed native ' + - 'libraries: %s', device_apk_name) - return - - # If the APK name is not provided, use the default of 'base.apk'. 
This - # will be ignored if we find <package_name>-<version>.apk file paths - # in the input, but will work properly for Android L+, as long as we're - # not using Android app bundles. - device_apk_name = device_apk_name or 'base.apk' - - key = "%s/%s" % (package_name, device_apk_name) - if key in self._libs_map: - raise KeyError('There is already an APK associated with (%s)' % key) - - self._libs_map[key] = native_libs - - @staticmethod - def _MatchApkDeviceInstallPath(apk_path): - """Check whether a given path matches an installed APK device file path. - - Args: - apk_path: Device-specific file path. - Returns: - On success, a (package_name, apk_name) tuple. On failure, (None, None). - """ - m = ApkLibraryPathTranslator._RE_APK_PATH_1.match(apk_path) - if m: - return (m.group('package_name'), 'base.apk') - - m = ApkLibraryPathTranslator._RE_APK_PATH_2.match(apk_path) - if m: - return (m.group('package_name'), m.group('apk_name')) - - return (None, None) - - def TranslatePath(self, apk_path, apk_offset): - """Translate a potential apk file path + offset into library path + offset. - - Args: - apk_path: Library or apk file path on the device (e.g. - '/data/data/com.example.app-XSAHKSJH/base.apk'). - apk_offset: Byte offset within the library or apk. - - Returns: - a new (lib_path, lib_offset) tuple. If |apk_path| points to an APK, - then this function searches inside the corresponding host-side APKs - (added with AddHostApk() above) for the corresponding uncompressed - native shared library at |apk_offset|; if found, this returns a new - device-specific path corresponding to a virtual installation of said - library with an adjusted offset. - - Otherwise, just return the original (apk_path, apk_offset) values. - """ - if not apk_path.endswith('.apk'): - return (apk_path, apk_offset) - - apk_package, apk_name = self._MatchApkDeviceInstallPath(apk_path) - if not apk_package: - return (apk_path, apk_offset) - - key = '%s/%s' % (apk_package, apk_name) - native_libs = self._libs_map.get(key) - if not native_libs: - logging.debug('Unknown %s package', key) - return (apk_path, apk_offset) - - lib_name, new_offset = native_libs.FindLibraryByOffset(apk_offset) - if not lib_name: - logging.debug('Invalid offset in %s.apk package: %d', key, apk_offset) - return (apk_path, apk_offset) - - lib_name = os.path.basename(lib_name) - - # Some libraries are stored with a crazy. prefix inside the APK. This - # is done to prevent the PackageManager from extracting the libraries - # at installation time when running on pre-Android M systems, where the - # system linker cannot load libraries directly from APKs. - crazy_prefix = 'crazy.' - if lib_name.startswith(crazy_prefix): - lib_name = lib_name[len(crazy_prefix):] - - # Put this in a fictional lib sub-directory for good measure. - new_path = '%s!lib/%s' % (apk_path, lib_name) - - return (new_path, new_offset) diff --git a/build/android/pylib/symbols/apk_native_libs_unittest.py b/build/android/pylib/symbols/apk_native_libs_unittest.py deleted file mode 100644 index 416918d8a112..000000000000 --- a/build/android/pylib/symbols/apk_native_libs_unittest.py +++ /dev/null @@ -1,396 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file.
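The translation above is easier to follow with a small, self-contained sketch. The snippet below is illustrative only and not part of this patch: the regular expression mirrors _RE_APK_PATH_2 and the 'crazy.' handling mirrors TranslatePath(), while translate() and its inputs are hypothetical stand-ins for the deleted classes.

    import posixpath
    import re

    # Same shape as ApkLibraryPathTranslator._RE_APK_PATH_2 above.
    _APK_PATH_RE = re.compile(
        r'/data/.*/(?P<package_name>[A-Za-z0-9_.]+)-(?P<random>[^/]+)/'
        r'(?P<apk_name>.+\.apk)')

    def translate(apk_path, offset, native_libs):
      """native_libs: (lib_path, start, end) tuples for uncompressed .so files."""
      if not _APK_PATH_RE.match(apk_path):
        return apk_path, offset  # Not an installed-APK path; leave untouched.
      for lib_path, start, end in native_libs:
        if start <= offset < end:
          name = posixpath.basename(lib_path)
          if name.startswith('crazy.'):  # Legacy Chromium linker prefix.
            name = name[len('crazy.'):]
          return '%s!lib/%s' % (apk_path, name), offset - start
      return apk_path, offset  # Offset not inside any uncompressed library.

    print(translate('/data/data/com.example.app-1/base.apk', 0x123be00,
                    [('lib/armeabi-v7a/libfoo.so', 0x1200000, 0x1280000)]))
    # ('/data/data/com.example.app-1/base.apk!lib/libfoo.so', 245248); 245248 == 0x3be00.

The deleted unit tests below exercise the same behavior, e.g. offset 757 inside a library starting at 200 translating to 557.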
- -import logging -import unittest - -from pylib.symbols import apk_native_libs - -# Mock ELF-like data -MOCK_ELF_DATA = '\x7fELFFFFFFFFFFFFFFFF' - -class MockApkZipInfo(object): - """A mock ApkZipInfo class, returned by MockApkReaderFactory instances.""" - def __init__(self, filename, file_size, compress_size, file_offset, - file_data): - self.filename = filename - self.file_size = file_size - self.compress_size = compress_size - self.file_offset = file_offset - self._data = file_data - - def __repr__(self): - """Convert to string for debugging.""" - return 'MockApkZipInfo["%s",size=%d,compressed=%d,offset=%d]' % ( - self.filename, self.file_size, self.compress_size, self.file_offset) - - def IsCompressed(self): - """Returns True iff the entry is compressed.""" - return self.file_size != self.compress_size - - def IsElfFile(self): - """Returns True iff the entry is an ELF file.""" - if not self._data or len(self._data) < 4: - return False - - return self._data[0:4] == '\x7fELF' - - -class MockApkReader(object): - """A mock ApkReader instance used during unit-testing. - - Do not use directly, but use a MockApkReaderFactory context, as in: - - with MockApkReaderFactory() as mock: - mock.AddTestEntry(file_path, file_size, compress_size, file_data) - ... - - # Actually returns the mock instance. - apk_reader = apk_native_libs.ApkReader('/some/path.apk') - """ - def __init__(self, apk_path='test.apk'): - """Initialize instance.""" - self._entries = [] - self._fake_offset = 0 - self._path = apk_path - - def __enter__(self): - return self - - def __exit__(self, *kwarg): - self.Close() - return - - @property - def path(self): - return self._path - - def AddTestEntry(self, filepath, file_size, compress_size, file_data): - """Add a new entry to the instance for unit-tests. - - Do not call this directly, use the AddTestEntry() method on the parent - MockApkReaderFactory instance. - - Args: - filepath: archive file path. - file_size: uncompressed file size in bytes. - compress_size: compressed size in bytes. - file_data: file data to be checked by IsElfFile() - - Note that file_data can be None, or that its size can be actually - smaller than |compress_size| when used during unit-testing. 
- """ - self._entries.append(MockApkZipInfo(filepath, file_size, compress_size, - self._fake_offset, file_data)) - self._fake_offset += compress_size - - def Close(self): # pylint: disable=no-self-use - """Close this reader instance.""" - return - - def ListEntries(self): - """Return a list of MockApkZipInfo instances for this input APK.""" - return self._entries - - def FindEntry(self, file_path): - """Find the MockApkZipInfo instance corresponds to a given file path.""" - for entry in self._entries: - if entry.filename == file_path: - return entry - raise KeyError('Could not find mock zip archive member for: ' + file_path) - - -class MockApkReaderTest(unittest.TestCase): - - def testEmpty(self): - with MockApkReader() as reader: - entries = reader.ListEntries() - self.assertTrue(len(entries) == 0) - with self.assertRaises(KeyError): - reader.FindEntry('non-existent-entry.txt') - - def testSingleEntry(self): - with MockApkReader() as reader: - reader.AddTestEntry('some-path/some-file', 20000, 12345, file_data=None) - entries = reader.ListEntries() - self.assertTrue(len(entries) == 1) - entry = entries[0] - self.assertEqual(entry.filename, 'some-path/some-file') - self.assertEqual(entry.file_size, 20000) - self.assertEqual(entry.compress_size, 12345) - self.assertTrue(entry.IsCompressed()) - - entry2 = reader.FindEntry('some-path/some-file') - self.assertEqual(entry, entry2) - - def testMultipleEntries(self): - with MockApkReader() as reader: - _ENTRIES = { - 'foo.txt': (1024, 1024, 'FooFooFoo'), - 'lib/bar/libcode.so': (16000, 3240, 1024, '\x7fELFFFFFFFFFFFF'), - } - for path, props in _ENTRIES.iteritems(): - reader.AddTestEntry(path, props[0], props[1], props[2]) - - entries = reader.ListEntries() - self.assertEqual(len(entries), len(_ENTRIES)) - for path, props in _ENTRIES.iteritems(): - entry = reader.FindEntry(path) - self.assertEqual(entry.filename, path) - self.assertEqual(entry.file_size, props[0]) - self.assertEqual(entry.compress_size, props[1]) - - -class ApkNativeLibrariesTest(unittest.TestCase): - - def setUp(self): - logging.getLogger().setLevel(logging.ERROR) - - def testEmptyApk(self): - with MockApkReader() as reader: - libs_map = apk_native_libs.ApkNativeLibraries(reader) - self.assertTrue(libs_map.IsEmpty()) - self.assertEqual(len(libs_map.GetLibraries()), 0) - lib_path, lib_offset = libs_map.FindLibraryByOffset(0) - self.assertIsNone(lib_path) - self.assertEqual(lib_offset, 0) - - def testSimpleApk(self): - with MockApkReader() as reader: - _MOCK_ENTRIES = [ - # Top-level library should be ignored. - ('libfoo.so', 1000, 1000, MOCK_ELF_DATA, False), - # Library not under lib/ should be ignored. - ('badlib/test-abi/libfoo2.so', 1001, 1001, MOCK_ELF_DATA, False), - # Library under lib// but without .so extension should be ignored. - ('lib/test-abi/libfoo4.so.1', 1003, 1003, MOCK_ELF_DATA, False), - # Library under lib// with .so suffix, but compressed -> ignored. 
- ('lib/test-abi/libfoo5.so', 1004, 1003, MOCK_ELF_DATA, False), - # First correct library - ('lib/test-abi/libgood1.so', 1005, 1005, MOCK_ELF_DATA, True), - # Second correct library: support sub-directories - ('lib/test-abi/subdir/libgood2.so', 1006, 1006, MOCK_ELF_DATA, True), - # Third correct library, no lib prefix required - ('lib/test-abi/crazy.libgood3.so', 1007, 1007, MOCK_ELF_DATA, True), - ] - file_offsets = [] - prev_offset = 0 - for ent in _MOCK_ENTRIES: - reader.AddTestEntry(ent[0], ent[1], ent[2], ent[3]) - file_offsets.append(prev_offset) - prev_offset += ent[2] - - libs_map = apk_native_libs.ApkNativeLibraries(reader) - self.assertFalse(libs_map.IsEmpty()) - self.assertEqual(libs_map.GetLibraries(), [ - 'lib/test-abi/crazy.libgood3.so', - 'lib/test-abi/libgood1.so', - 'lib/test-abi/subdir/libgood2.so', - ]) - - BIAS = 10 - for mock_ent, file_offset in zip(_MOCK_ENTRIES, file_offsets): - if mock_ent[4]: - lib_path, lib_offset = libs_map.FindLibraryByOffset( - file_offset + BIAS) - self.assertEqual(lib_path, mock_ent[0]) - self.assertEqual(lib_offset, BIAS) - - - def testMultiAbiApk(self): - with MockApkReader() as reader: - _MOCK_ENTRIES = [ - ('lib/abi1/libfoo.so', 1000, 1000, MOCK_ELF_DATA), - ('lib/abi2/libfoo.so', 1000, 1000, MOCK_ELF_DATA), - ] - for ent in _MOCK_ENTRIES: - reader.AddTestEntry(ent[0], ent[1], ent[2], ent[3]) - - libs_map = apk_native_libs.ApkNativeLibraries(reader) - self.assertFalse(libs_map.IsEmpty()) - self.assertEqual(libs_map.GetLibraries(), [ - 'lib/abi1/libfoo.so', 'lib/abi2/libfoo.so']) - - lib1_name, lib1_offset = libs_map.FindLibraryByOffset(10) - self.assertEqual(lib1_name, 'lib/abi1/libfoo.so') - self.assertEqual(lib1_offset, 10) - - lib2_name, lib2_offset = libs_map.FindLibraryByOffset(1000) - self.assertEqual(lib2_name, 'lib/abi2/libfoo.so') - self.assertEqual(lib2_offset, 0) - - -class MockApkNativeLibraries(apk_native_libs.ApkNativeLibraries): - """A mock ApkNativeLibraries instance that can be used as input to - ApkLibraryPathTranslator without creating an ApkReader instance. - - Create a new instance, then call AddTestEntry or AddTestEntries - as many times as necessary, before using it as a regular - ApkNativeLibraries instance. - """ - # pylint: disable=super-init-not-called - def __init__(self): - self._native_libs = [] - - # pylint: enable=super-init-not-called - - def AddTestEntry(self, lib_path, file_offset, file_size): - """Add a new test entry. - - Args: - entry: A tuple of (library-path, file-offset, file-size) values, - (e.g. ('lib/armeabi-v8a/libfoo.so', 0x10000, 0x2000)). - """ - self._native_libs.append((lib_path, file_offset, file_offset + file_size)) - - def AddTestEntries(self, entries): - """Add a list of new test entries. - - Args: - entries: A list of (library-path, file-offset, file-size) values. 
- """ - for entry in entries: - self.AddTestEntry(entry[0], entry[1], entry[2]) - - -class MockApkNativeLibrariesTest(unittest.TestCase): - - def testEmptyInstance(self): - mock = MockApkNativeLibraries() - self.assertTrue(mock.IsEmpty()) - self.assertEqual(mock.GetLibraries(), []) - self.assertEqual(mock.GetDumpList(), []) - - def testAddTestEntry(self): - mock = MockApkNativeLibraries() - mock.AddTestEntry('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000) - mock.AddTestEntry('lib/x86/libzoo.so', 0x10000, 0x10000) - mock.AddTestEntry('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000) - self.assertFalse(mock.IsEmpty()) - self.assertEqual(mock.GetLibraries(), ['lib/armeabi-v7a/libbar.so', - 'lib/armeabi-v7a/libfoo.so', - 'lib/x86/libzoo.so']) - self.assertEqual(mock.GetDumpList(), [ - ('lib/x86/libzoo.so', 0x10000, 0x10000), - ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000), - ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000), - ]) - - def testAddTestEntries(self): - mock = MockApkNativeLibraries() - mock.AddTestEntries([ - ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000), - ('lib/x86/libzoo.so', 0x10000, 0x10000), - ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000), - ]) - self.assertFalse(mock.IsEmpty()) - self.assertEqual(mock.GetLibraries(), ['lib/armeabi-v7a/libbar.so', - 'lib/armeabi-v7a/libfoo.so', - 'lib/x86/libzoo.so']) - self.assertEqual(mock.GetDumpList(), [ - ('lib/x86/libzoo.so', 0x10000, 0x10000), - ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000), - ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000), - ]) - - -class ApkLibraryPathTranslatorTest(unittest.TestCase): - - def _CheckUntranslated(self, translator, path, offset): - """Check that a given (path, offset) is not modified by translation.""" - self.assertEqual(translator.TranslatePath(path, offset), (path, offset)) - - - def _CheckTranslated(self, translator, path, offset, new_path, new_offset): - """Check that (path, offset) is translated into (new_path, new_offset).""" - self.assertEqual(translator.TranslatePath(path, offset), - (new_path, new_offset)) - - def testEmptyInstance(self): - translator = apk_native_libs.ApkLibraryPathTranslator() - self._CheckUntranslated( - translator, '/data/data/com.example.app-1/base.apk', 0x123456) - - def testSimpleApk(self): - mock_libs = MockApkNativeLibraries() - mock_libs.AddTestEntries([ - ('lib/test-abi/libfoo.so', 200, 2000), - ('lib/test-abi/libbar.so', 3200, 3000), - ('lib/test-abi/crazy.libzoo.so', 6200, 2000), - ]) - translator = apk_native_libs.ApkLibraryPathTranslator() - translator.AddHostApk('com.example.app', mock_libs) - - # Offset is within the first uncompressed library - self._CheckTranslated( - translator, - '/data/data/com.example.app-9.apk', 757, - '/data/data/com.example.app-9.apk!lib/libfoo.so', 557) - - # Offset is within the second compressed library. - self._CheckUntranslated( - translator, - '/data/data/com.example.app-9/base.apk', 2800) - - # Offset is within the third uncompressed library. - self._CheckTranslated( - translator, - '/data/data/com.example.app-1/base.apk', 3628, - '/data/data/com.example.app-1/base.apk!lib/libbar.so', 428) - - # Offset is within the fourth uncompressed library with crazy. prefix - self._CheckTranslated( - translator, - '/data/data/com.example.app-XX/base.apk', 6500, - '/data/data/com.example.app-XX/base.apk!lib/libzoo.so', 300) - - # Out-of-bounds apk offset. - self._CheckUntranslated( - translator, - '/data/data/com.example.app-1/base.apk', 10000) - - # Invalid package name. 
- self._CheckUntranslated( - translator, '/data/data/com.example2.app-1/base.apk', 757) - - # Invalid apk name. - self._CheckUntranslated( - translator, '/data/data/com.example.app-2/not-base.apk', 100) - - # Invalid file extensions. - self._CheckUntranslated( - translator, '/data/data/com.example.app-2/base', 100) - - self._CheckUntranslated( - translator, '/data/data/com.example.app-2/base.apk.dex', 100) - - def testBundleApks(self): - mock_libs1 = MockApkNativeLibraries() - mock_libs1.AddTestEntries([ - ('lib/test-abi/libfoo.so', 200, 2000), - ('lib/test-abi/libbbar.so', 3200, 3000), - ]) - mock_libs2 = MockApkNativeLibraries() - mock_libs2.AddTestEntries([ - ('lib/test-abi/libzoo.so', 200, 2000), - ('lib/test-abi/libtool.so', 3000, 4000), - ]) - translator = apk_native_libs.ApkLibraryPathTranslator() - translator.AddHostApk('com.example.app', mock_libs1, 'base-master.apk') - translator.AddHostApk('com.example.app', mock_libs2, 'feature-master.apk') - - self._CheckTranslated( - translator, - '/data/app/com.example.app-XUIYIUW/base-master.apk', 757, - '/data/app/com.example.app-XUIYIUW/base-master.apk!lib/libfoo.so', 557) - - self._CheckTranslated( - translator, - '/data/app/com.example.app-XUIYIUW/feature-master.apk', 3200, - '/data/app/com.example.app-XUIYIUW/feature-master.apk!lib/libtool.so', - 200) - - -if __name__ == '__main__': - unittest.main() diff --git a/build/android/pylib/symbols/deobfuscator.py b/build/android/pylib/symbols/deobfuscator.py index ffc23b87048b..710609854f2b 100644 --- a/build/android/pylib/symbols/deobfuscator.py +++ b/build/android/pylib/symbols/deobfuscator.py @@ -1,175 +1,50 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -import logging import os -import subprocess -import threading -import time -import uuid -from devil.utils import reraiser_thread from pylib import constants +from .expensive_line_transformer import ExpensiveLineTransformer +from .expensive_line_transformer import ExpensiveLineTransformerPool +_MINIMUM_TIMEOUT = 10.0 +_PER_LINE_TIMEOUT = .005 # Should be able to process 200 lines per second. +_PROCESS_START_TIMEOUT = 20.0 +_MAX_RESTARTS = 4 # Should be plenty unless tool is crashing on start-up. +_POOL_SIZE = 4 +_PASSTHROUH_ON_FAILURE = False -_MINIUMUM_TIMEOUT = 3.0 -_PER_LINE_TIMEOUT = .002 # Should be able to process 500 lines per second. -_PROCESS_START_TIMEOUT = 10.0 -_MAX_RESTARTS = 10 # Should be plenty unless tool is crashing on start-up. - -class Deobfuscator(object): +class Deobfuscator(ExpensiveLineTransformer): def __init__(self, mapping_path): + super().__init__(_PROCESS_START_TIMEOUT, _MINIMUM_TIMEOUT, + _PER_LINE_TIMEOUT) script_path = os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android', 'stacktrace', 'java_deobfuscate.py') - cmd = [script_path, mapping_path] - # Allow only one thread to call TransformLines() at a time. - self._lock = threading.Lock() - # Ensure that only one thread attempts to kill self._proc in Close(). - self._close_lock = threading.Lock() - self._closed_called = False - # Assign to None so that attribute exists if Popen() throws. - self._proc = None - # Start process eagerly to hide start-up latency. 
- self._proc_start_time = time.time() - self._proc = subprocess.Popen( - cmd, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE, - close_fds=True) - - def IsClosed(self): - return self._closed_called or self._proc.returncode is not None - - def IsBusy(self): - return self._lock.locked() - - def IsReady(self): - return not self.IsClosed() and not self.IsBusy() - - def TransformLines(self, lines): - """Deobfuscates obfuscated names found in the given lines. - - If anything goes wrong (process crashes, timeout, etc), returns |lines|. - - Args: - lines: A list of strings without trailing newlines. - - Returns: - A list of strings without trailing newlines. - """ - if not lines: - return [] - - # Deobfuscated stacks contain more frames than obfuscated ones when method - # inlining occurs. To account for the extra output lines, keep reading until - # this eof_line token is reached. - eof_line = uuid.uuid4().hex - out_lines = [] - - def deobfuscate_reader(): - while True: - line = self._proc.stdout.readline() - # Return an empty string at EOF (when stdin is closed). - if not line: - break - line = line[:-1] - if line == eof_line: - break - out_lines.append(line) - - if self.IsBusy(): - logging.warning('deobfuscator: Having to wait for Java deobfuscation.') + self._command = [script_path, mapping_path] + self.start() - # Allow only one thread to operate at a time. - with self._lock: - if self.IsClosed(): - if not self._closed_called: - logging.warning('deobfuscator: Process exited with code=%d.', - self._proc.returncode) - self.Close() - return lines + @property + def name(self): + return "deobfuscator" - # TODO(agrieve): Can probably speed this up by only sending lines through - # that might contain an obfuscated name. - reader_thread = reraiser_thread.ReraiserThread(deobfuscate_reader) - reader_thread.start() + @property + def command(self): + return self._command - try: - self._proc.stdin.write('\n'.join(lines)) - self._proc.stdin.write('\n{}\n'.format(eof_line)) - self._proc.stdin.flush() - time_since_proc_start = time.time() - self._proc_start_time - timeout = (max(0, _PROCESS_START_TIMEOUT - time_since_proc_start) + - max(_MINIUMUM_TIMEOUT, len(lines) * _PER_LINE_TIMEOUT)) - reader_thread.join(timeout) - if self.IsClosed(): - logging.warning( - 'deobfuscator: Close() called by another thread during join().') - return lines - if reader_thread.is_alive(): - logging.error('deobfuscator: Timed out.') - self.Close() - return lines - return out_lines - except IOError: - logging.exception('deobfuscator: Exception during java_deobfuscate') - self.Close() - return lines - def Close(self): - with self._close_lock: - needs_closing = not self.IsClosed() - self._closed_called = True - - if needs_closing: - self._proc.stdin.close() - self._proc.kill() - self._proc.wait() - - def __del__(self): - # self._proc is None when Popen() fails. - if not self._closed_called and self._proc: - logging.error('deobfuscator: Forgot to Close()') - self.Close() - - -class DeobfuscatorPool(object): - # As of Sep 2017, each instance requires about 500MB of RAM, as measured by: - # /usr/bin/time -v build/android/stacktrace/java_deobfuscate.py \ - # out/Release/apks/ChromePublic.apk.mapping - def __init__(self, mapping_path, pool_size=4): - self._mapping_path = mapping_path - self._pool = [Deobfuscator(mapping_path) for _ in xrange(pool_size)] - # Allow only one thread to select from the pool at a time. 
- self._lock = threading.Lock() - self._num_restarts = 0 - - def TransformLines(self, lines): - with self._lock: - assert self._pool, 'TransformLines() called on a closed DeobfuscatorPool.' - - # De-obfuscation is broken. - if self._num_restarts == _MAX_RESTARTS: - raise Exception('Deobfuscation seems broken.') - - # Restart any closed Deobfuscators. - for i, d in enumerate(self._pool): - if d.IsClosed(): - logging.warning('deobfuscator: Restarting closed instance.') - self._pool[i] = Deobfuscator(self._mapping_path) - self._num_restarts += 1 - if self._num_restarts == _MAX_RESTARTS: - logging.warning('deobfuscator: MAX_RESTARTS reached.') - - selected = next((x for x in self._pool if x.IsReady()), self._pool[0]) - # Rotate the order so that next caller will not choose the same one. - self._pool.remove(selected) - self._pool.append(selected) - - return selected.TransformLines(lines) - - def Close(self): - with self._lock: - for d in self._pool: - d.Close() - self._pool = None +class DeobfuscatorPool(ExpensiveLineTransformerPool): + def __init__(self, mapping_path): + # As of Sep 2017, each instance requires about 500MB of RAM, as measured by: + # /usr/bin/time -v build/android/stacktrace/java_deobfuscate.py \ + # out/Release/apks/ChromePublic.apk.mapping + self.mapping_path = mapping_path + super().__init__(_MAX_RESTARTS, _POOL_SIZE, _PASSTHROUH_ON_FAILURE) + + @property + def name(self): + return "deobfuscator-pool" + + def CreateTransformer(self): + return Deobfuscator(self.mapping_path) diff --git a/build/android/pylib/symbols/elf_symbolizer.py b/build/android/pylib/symbols/elf_symbolizer.py deleted file mode 100644 index 1f2f91825553..000000000000 --- a/build/android/pylib/symbols/elf_symbolizer.py +++ /dev/null @@ -1,487 +0,0 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import collections -import datetime -import logging -import multiprocessing -import os -import posixpath -import Queue -import re -import subprocess -import sys -import threading -import time - - -# addr2line builds a possibly infinite memory cache that can exhaust -# the computer's memory if allowed to grow for too long. This constant -# controls how many lookups we do before restarting the process. 4000 -# gives near peak performance without extreme memory usage. -ADDR2LINE_RECYCLE_LIMIT = 4000 - - -ELF_MAGIC = '\x7f\x45\x4c\x46' - - -def ContainsElfMagic(file_path): - if os.path.getsize(file_path) < 4: - return False - try: - with open(file_path, 'r') as f: - b = f.read(4) - return b == ELF_MAGIC - except IOError: - return False - - -class ELFSymbolizer(object): - """An uber-fast (multiprocessing, pipelined and asynchronous) ELF symbolizer. - - This class is a frontend for addr2line (part of GNU binutils), designed to - symbolize batches of large numbers of symbols for a given ELF file. It - supports sharding symbolization against many addr2line instances and - pipelining of multiple requests per each instance (in order to hide addr2line - internals and OS pipe latencies). - - The interface exhibited by this class is a very simple asynchronous interface, - which is based on the following three methods: - - SymbolizeAsync(): used to request (enqueue) resolution of a given address. - - The |callback| method: used to communicated back the symbol information. - - Join(): called to conclude the batch to gather the last outstanding results. 
- In essence, before the Join method returns, this class will have issued as - many callbacks as the number of SymbolizeAsync() calls. In this regard, note - that due to multiprocess sharding, callbacks can be delivered out of order. - - Some background about addr2line: - - it is invoked passing the elf path in the cmdline, piping the addresses in - its stdin and getting results on its stdout. - - it has pretty large response times for the first requests, but it - works very well in streaming mode once it has been warmed up. - - it doesn't scale by itself (on more cores). However, spawning multiple - instances at the same time on the same file is pretty efficient as they - keep hitting the pagecache and become mostly CPU bound. - - it might hang or crash, mostly for OOM. This class deals with both of these - problems. - - Despite the "scary" imports and the multi* words above, (almost) no multi- - threading/processing is involved from the python viewpoint. Concurrency - here is achieved by spawning several addr2line subprocesses and handling their - output pipes asynchronously. Therefore, all the code here (with the exception - of the Queue instance in Addr2Line) should be free from mind-blowing - thread-safety concerns. - - The multiprocess sharding works as follows: - The symbolizer tries to use as few addr2line instances as - possible (with respect to |max_concurrent_jobs|) and enqueues all the requests - in a single addr2line instance. For few symbols (i.e. dozens) sharding isn't - worth the startup cost. - The multiprocess logic kicks in as soon as the queues for the existing - instances grow. Specifically, once all the existing instances reach the - |max_queue_size| bound, a new addr2line instance is kicked in. - In the case of a very eager producer (i.e. all |max_concurrent_jobs| instances - have a backlog of |max_queue_size|), back-pressure is applied on the caller by - blocking the SymbolizeAsync method. - - This module has been deliberately designed to be dependency free (w.r.t. - other modules in this project), to allow easy reuse in external projects. - """ - - def __init__(self, elf_file_path, addr2line_path, callback, inlines=False, - max_concurrent_jobs=None, addr2line_timeout=30, max_queue_size=50, - source_root_path=None, strip_base_path=None): - """Args: - elf_file_path: path of the elf file to be symbolized. - addr2line_path: path of the toolchain's addr2line binary. - callback: a callback which will be invoked for each resolved symbol with - the two args (sym_info, callback_arg). The former is an instance of - |ELFSymbolInfo| and contains the symbol information. The latter is an - embedder-provided argument which is passed to SymbolizeAsync(). - inlines: when True, the ELFSymbolInfo will also contain the details about - the outer inlining functions. When False, only the innermost function - will be provided. - max_concurrent_jobs: Max number of addr2line instances spawned. - Parallelize responsibly, addr2line is a memory and I/O monster. - max_queue_size: Max number of outstanding requests per addr2line instance. - addr2line_timeout: Max time (in seconds) to wait for an addr2line response. - After the timeout, the instance will be considered hung and respawned. - source_root_path: In some toolchains only the name of the source file - is output, without any path information; disambiguation searches - through the source directory specified by |source_root_path| argument - for files whose name matches, adding the full path information to the - output. For example, if the toolchain outputs "unicode.cc" and there - is a file called "unicode.cc" located under |source_root_path|/foo, - the tool will replace "unicode.cc" with - "|source_root_path|/foo/unicode.cc". If there are multiple files with - the same name, disambiguation will fail because the tool cannot - determine which of the files was the source of the symbol. - strip_base_path: Rebases the symbols source paths onto |source_root_path| - (i.e. replace |strip_base_path| with |source_root_path|). - """ - assert(os.path.isfile(addr2line_path)), 'Cannot find ' + addr2line_path - self.elf_file_path = elf_file_path - self.addr2line_path = addr2line_path - self.callback = callback - self.inlines = inlines - self.max_concurrent_jobs = (max_concurrent_jobs or - min(multiprocessing.cpu_count(), 4)) - self.max_queue_size = max_queue_size - self.addr2line_timeout = addr2line_timeout - self.requests_counter = 0 # For generating monotonic request IDs. - self._a2l_instances = [] # Up to |max_concurrent_jobs| _Addr2Line inst. - - # If necessary, create disambiguation lookup table - self.disambiguate = source_root_path is not None - self.disambiguation_table = {} - self.strip_base_path = strip_base_path - if self.disambiguate: - self.source_root_path = os.path.abspath(source_root_path) - self._CreateDisambiguationTable() - - # Create one addr2line instance. More instances will be created on demand - # (up to |max_concurrent_jobs|) depending on the rate of the requests. - self._CreateNewA2LInstance() - - def SymbolizeAsync(self, addr, callback_arg=None): - """Requests symbolization of a given address. - - This method is not guaranteed to return immediately. It generally does, but - in some scenarios (e.g. all addr2line instances have full queues) it can - block to create back-pressure. - - Args: - addr: address to symbolize. - callback_arg: optional argument which will be passed to the |callback|.""" - assert isinstance(addr, int) - - # Process all the symbols that have been resolved in the meanwhile. - # Essentially, this drains all the addr2line(s) out queues. - for a2l_to_purge in self._a2l_instances: - a2l_to_purge.ProcessAllResolvedSymbolsInQueue() - a2l_to_purge.RecycleIfNecessary() - - # Find the best instance according to this logic: - # 1. Find an existing instance with the shortest queue. - # 2. If all of the instances' queues are full, but there is room in the pool - # (i.e. < |max_concurrent_jobs|), create a new instance. - # 3. If there were already |max_concurrent_jobs| instances and all of them - # had full queues, apply back-pressure. - - # 1. - def _SortByQueueSizeAndReqID(a2l): - return (a2l.queue_size, a2l.first_request_id) - a2l = min(self._a2l_instances, key=_SortByQueueSizeAndReqID) - - # 2. - if (a2l.queue_size >= self.max_queue_size and - len(self._a2l_instances) < self.max_concurrent_jobs): - a2l = self._CreateNewA2LInstance() - - # 3.
- if a2l.queue_size >= self.max_queue_size: - a2l.WaitForNextSymbolInQueue() - - a2l.EnqueueRequest(addr, callback_arg) - - def WaitForIdle(self): - """Waits for all the outstanding requests to complete.""" - for a2l in self._a2l_instances: - a2l.WaitForIdle() - - def Join(self): - """Waits for all the outstanding requests to complete and terminates.""" - for a2l in self._a2l_instances: - a2l.WaitForIdle() - a2l.Terminate() - - def _CreateNewA2LInstance(self): - assert len(self._a2l_instances) < self.max_concurrent_jobs - a2l = ELFSymbolizer.Addr2Line(self) - self._a2l_instances.append(a2l) - return a2l - - def _CreateDisambiguationTable(self): - """ Non-unique file names will result in None entries""" - start_time = time.time() - logging.info('Collecting information about available source files...') - self.disambiguation_table = {} - - for root, _, filenames in os.walk(self.source_root_path): - for f in filenames: - self.disambiguation_table[f] = os.path.join(root, f) if (f not in - self.disambiguation_table) else None - logging.info('Finished collecting information about ' - 'possible files (took %.1f s).', - (time.time() - start_time)) - - - class Addr2Line(object): - """A python wrapper around an addr2line instance. - - The communication with the addr2line process looks as follows: - [STDIN] [STDOUT] (from addr2line's viewpoint) - > f001111 - > f002222 - < Symbol::Name(foo, bar) for f001111 - < /path/to/source/file.c:line_number - > f003333 - < Symbol::Name2() for f002222 - < /path/to/source/file.c:line_number - < Symbol::Name3() for f003333 - < /path/to/source/file.c:line_number - """ - - SYM_ADDR_RE = re.compile(r'([^:]+):(\?|\d+).*') - - def __init__(self, symbolizer): - self._symbolizer = symbolizer - self._lib_file_name = posixpath.basename(symbolizer.elf_file_path) - - # The request queue (i.e. addresses pushed to addr2line's stdin and not - # yet retrieved on stdout) - self._request_queue = collections.deque() - - # This is essentially len(self._request_queue). It has been optimized to a - # separate field because turned out to be a perf hot-spot. - self.queue_size = 0 - - # Keep track of the number of symbols a process has processed to - # avoid a single process growing too big and using all the memory. - self._processed_symbols_count = 0 - - # Objects required to handle the addr2line subprocess. - self._proc = None # Subprocess.Popen(...) instance. - self._thread = None # Threading.thread instance. - self._out_queue = None # Queue.Queue instance (for buffering a2l stdout). - self._RestartAddr2LineProcess() - - def EnqueueRequest(self, addr, callback_arg): - """Pushes an address to addr2line's stdin (and keeps track of it).""" - self._symbolizer.requests_counter += 1 # For global "age" of requests. - req_idx = self._symbolizer.requests_counter - self._request_queue.append((addr, callback_arg, req_idx)) - self.queue_size += 1 - self._WriteToA2lStdin(addr) - - def WaitForIdle(self): - """Waits until all the pending requests have been symbolized.""" - while self.queue_size > 0: - self.WaitForNextSymbolInQueue() - - def WaitForNextSymbolInQueue(self): - """Waits for the next pending request to be symbolized.""" - if not self.queue_size: - return - - # This outer loop guards against a2l hanging (detecting stdout timeout). - while True: - start_time = datetime.datetime.now() - timeout = datetime.timedelta(seconds=self._symbolizer.addr2line_timeout) - - # The inner loop guards against a2l crashing (checking if it exited). 
- while datetime.datetime.now() - start_time < timeout: - # poll() returns !None if the process exited. a2l should never exit. - if self._proc.poll(): - logging.warning('addr2line crashed, respawning (lib: %s).', - self._lib_file_name) - self._RestartAddr2LineProcess() - # TODO(primiano): the best thing to do in this case would be - # shrinking the pool size as, very likely, addr2line is crashed - # due to low memory (and the respawned one will die again soon). - - try: - lines = self._out_queue.get(block=True, timeout=0.25) - except Queue.Empty: - # On timeout (1/4 s.) repeat the inner loop and check if either the - # addr2line process did crash or we waited its output for too long. - continue - - # In nominal conditions, we get straight to this point. - self._ProcessSymbolOutput(lines) - return - - # If this point is reached, we waited more than |addr2line_timeout|. - logging.warning('Hung addr2line process, respawning (lib: %s).', - self._lib_file_name) - self._RestartAddr2LineProcess() - - def ProcessAllResolvedSymbolsInQueue(self): - """Consumes all the addr2line output lines produced (without blocking).""" - if not self.queue_size: - return - while True: - try: - lines = self._out_queue.get_nowait() - except Queue.Empty: - break - self._ProcessSymbolOutput(lines) - - def RecycleIfNecessary(self): - """Restarts the process if it has been used for too long. - - A long running addr2line process will consume excessive amounts - of memory without any gain in performance.""" - if self._processed_symbols_count >= ADDR2LINE_RECYCLE_LIMIT: - self._RestartAddr2LineProcess() - - - def Terminate(self): - """Kills the underlying addr2line process. - - The poller |_thread| will terminate as well due to the broken pipe.""" - try: - self._proc.kill() - self._proc.communicate() # Essentially wait() without risking deadlock. - except Exception: # pylint: disable=broad-except - # An exception while terminating? How interesting. - pass - self._proc = None - - def _WriteToA2lStdin(self, addr): - self._proc.stdin.write('%s\n' % hex(addr)) - if self._symbolizer.inlines: - # In the case of inlines we output an extra blank line, which causes - # addr2line to emit a (??,??:0) tuple that we use as a boundary marker. 
- self._proc.stdin.write('\n') - self._proc.stdin.flush() - - def _ProcessSymbolOutput(self, lines): - """Parses an addr2line symbol output and triggers the client callback.""" - (_, callback_arg, _) = self._request_queue.popleft() - self.queue_size -= 1 - - innermost_sym_info = None - sym_info = None - for (line1, line2) in lines: - prev_sym_info = sym_info - name = line1 if not line1.startswith('?') else None - source_path = None - source_line = None - m = ELFSymbolizer.Addr2Line.SYM_ADDR_RE.match(line2) - if m: - if not m.group(1).startswith('?'): - source_path = m.group(1) - if not m.group(2).startswith('?'): - source_line = int(m.group(2)) - else: - logging.warning('Got invalid symbol path from addr2line: %s', line2) - - # In case disambiguation is on, and needed - was_ambiguous = False - disambiguated = False - if self._symbolizer.disambiguate: - if source_path and not posixpath.isabs(source_path): - path = self._symbolizer.disambiguation_table.get(source_path) - was_ambiguous = True - disambiguated = path is not None - source_path = path if disambiguated else source_path - - # Use absolute paths (so that paths are consistent, as disambiguation - # uses absolute paths) - if source_path and not was_ambiguous: - source_path = os.path.abspath(source_path) - - if source_path and self._symbolizer.strip_base_path: - # Strip the base path - source_path = re.sub('^' + self._symbolizer.strip_base_path, - self._symbolizer.source_root_path or '', source_path) - - sym_info = ELFSymbolInfo(name, source_path, source_line, was_ambiguous, - disambiguated) - if prev_sym_info: - prev_sym_info.inlined_by = sym_info - if not innermost_sym_info: - innermost_sym_info = sym_info - - self._processed_symbols_count += 1 - self._symbolizer.callback(innermost_sym_info, callback_arg) - - def _RestartAddr2LineProcess(self): - if self._proc: - self.Terminate() - - # The only reason of existence of this Queue (and the corresponding - # Thread below) is the lack of a subprocess.stdout.poll_avail_lines(). - # Essentially this is a pipe able to extract a couple of lines atomically. - self._out_queue = Queue.Queue() - - # Start the underlying addr2line process in line buffered mode. - - cmd = [self._symbolizer.addr2line_path, '--functions', '--demangle', - '--exe=' + self._symbolizer.elf_file_path] - if self._symbolizer.inlines: - cmd += ['--inlines'] - self._proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE, - stdin=subprocess.PIPE, stderr=sys.stderr, close_fds=True) - - # Start the poller thread, which simply moves atomically the lines read - # from the addr2line's stdout to the |_out_queue|. - self._thread = threading.Thread( - target=ELFSymbolizer.Addr2Line.StdoutReaderThread, - args=(self._proc.stdout, self._out_queue, self._symbolizer.inlines)) - self._thread.daemon = True # Don't prevent early process exit. - self._thread.start() - - self._processed_symbols_count = 0 - - # Replay the pending requests on the new process (only for the case - # of a hung addr2line timing out during the game). - for (addr, _, _) in self._request_queue: - self._WriteToA2lStdin(addr) - - @staticmethod - def StdoutReaderThread(process_pipe, queue, inlines): - """The poller thread fn, which moves the addr2line stdout to the |queue|. - - This is the only piece of code not running on the main thread. It merely - writes to a Queue, which is thread-safe. 
In the case of inlines, it - detects the ??,??:0 marker and sends the lines atomically, such that the - main thread always receives all the lines corresponding to one symbol in - one shot.""" - try: - lines_for_one_symbol = [] - while True: - line1 = process_pipe.readline().rstrip('\r\n') - line2 = process_pipe.readline().rstrip('\r\n') - if not line1 or not line2: - break - inline_has_more_lines = inlines and (len(lines_for_one_symbol) == 0 or - (line1 != '??' and line2 != '??:0')) - if not inlines or inline_has_more_lines: - lines_for_one_symbol += [(line1, line2)] - if inline_has_more_lines: - continue - queue.put(lines_for_one_symbol) - lines_for_one_symbol = [] - process_pipe.close() - - # Every addr2line processes will die at some point, please die silently. - except (IOError, OSError): - pass - - @property - def first_request_id(self): - """Returns the request_id of the oldest pending request in the queue.""" - return self._request_queue[0][2] if self._request_queue else 0 - - -class ELFSymbolInfo(object): - """The result of the symbolization passed as first arg. of each callback.""" - - def __init__(self, name, source_path, source_line, was_ambiguous=False, - disambiguated=False): - """All the fields here can be None (if addr2line replies with '??').""" - self.name = name - self.source_path = source_path - self.source_line = source_line - # In the case of |inlines|=True, the |inlined_by| points to the outer - # function inlining the current one (and so on, to form a chain). - self.inlined_by = None - self.disambiguated = disambiguated - self.was_ambiguous = was_ambiguous - - def __str__(self): - return '%s [%s:%d]' % ( - self.name or '??', self.source_path or '??', self.source_line or 0) diff --git a/build/android/pylib/symbols/elf_symbolizer_unittest.py b/build/android/pylib/symbols/elf_symbolizer_unittest.py deleted file mode 100755 index 765b5989cb5b..000000000000 --- a/build/android/pylib/symbols/elf_symbolizer_unittest.py +++ /dev/null @@ -1,196 +0,0 @@ -#!/usr/bin/env python -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import functools -import logging -import os -import unittest - -from pylib.symbols import elf_symbolizer -from pylib.symbols import mock_addr2line - - -_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__), - 'mock_addr2line') -_INCOMPLETE_MOCK_ADDR = 1024 * 1024 -_UNKNOWN_MOCK_ADDR = 2 * 1024 * 1024 -_INLINE_MOCK_ADDR = 3 * 1024 * 1024 - - -class ELFSymbolizerTest(unittest.TestCase): - def setUp(self): - self._callback = functools.partial( - ELFSymbolizerTest._SymbolizeCallback, self) - self._resolved_addresses = set() - # Mute warnings, we expect them due to the crash/hang tests. 
- logging.getLogger().setLevel(logging.ERROR) - - def testParallelism1(self): - self._RunTest(max_concurrent_jobs=1, num_symbols=100) - - def testParallelism4(self): - self._RunTest(max_concurrent_jobs=4, num_symbols=100) - - def testParallelism8(self): - self._RunTest(max_concurrent_jobs=8, num_symbols=100) - - def testCrash(self): - os.environ['MOCK_A2L_CRASH_EVERY'] = '99' - self._RunTest(max_concurrent_jobs=1, num_symbols=100) - os.environ['MOCK_A2L_CRASH_EVERY'] = '0' - - def testHang(self): - os.environ['MOCK_A2L_HANG_EVERY'] = '99' - self._RunTest(max_concurrent_jobs=1, num_symbols=100) - os.environ['MOCK_A2L_HANG_EVERY'] = '0' - - def testInlines(self): - """Stimulate the inline processing logic.""" - symbolizer = elf_symbolizer.ELFSymbolizer( - elf_file_path='/path/doesnt/matter/mock_lib1.so', - addr2line_path=_MOCK_A2L_PATH, - callback=self._callback, - inlines=True, - max_concurrent_jobs=4) - - for addr in xrange(1000): - exp_inline = False - exp_unknown = False - - # First 100 addresses with inlines. - if addr < 100: - addr += _INLINE_MOCK_ADDR - exp_inline = True - - # Followed by 100 without inlines. - elif addr < 200: - pass - - # Followed by 100 interleaved inlines and not inlines. - elif addr < 300: - if addr & 1: - addr += _INLINE_MOCK_ADDR - exp_inline = True - - # Followed by 100 interleaved inlines and unknonwn. - elif addr < 400: - if addr & 1: - addr += _INLINE_MOCK_ADDR - exp_inline = True - else: - addr += _UNKNOWN_MOCK_ADDR - exp_unknown = True - - exp_name = 'mock_sym_for_addr_%d' % addr if not exp_unknown else None - exp_source_path = 'mock_src/mock_lib1.so.c' if not exp_unknown else None - exp_source_line = addr if not exp_unknown else None - cb_arg = (addr, exp_name, exp_source_path, exp_source_line, exp_inline) - symbolizer.SymbolizeAsync(addr, cb_arg) - - symbolizer.Join() - - def testIncompleteSyminfo(self): - """Stimulate the symbol-not-resolved logic.""" - symbolizer = elf_symbolizer.ELFSymbolizer( - elf_file_path='/path/doesnt/matter/mock_lib1.so', - addr2line_path=_MOCK_A2L_PATH, - callback=self._callback, - max_concurrent_jobs=1) - - # Test symbols with valid name but incomplete path. - addr = _INCOMPLETE_MOCK_ADDR - exp_name = 'mock_sym_for_addr_%d' % addr - exp_source_path = None - exp_source_line = None - cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False) - symbolizer.SymbolizeAsync(addr, cb_arg) - - # Test symbols with no name or sym info. - addr = _UNKNOWN_MOCK_ADDR - exp_name = None - exp_source_path = None - exp_source_line = None - cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False) - symbolizer.SymbolizeAsync(addr, cb_arg) - - symbolizer.Join() - - def testWaitForIdle(self): - symbolizer = elf_symbolizer.ELFSymbolizer( - elf_file_path='/path/doesnt/matter/mock_lib1.so', - addr2line_path=_MOCK_A2L_PATH, - callback=self._callback, - max_concurrent_jobs=1) - - # Test symbols with valid name but incomplete path. - addr = _INCOMPLETE_MOCK_ADDR - exp_name = 'mock_sym_for_addr_%d' % addr - exp_source_path = None - exp_source_line = None - cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False) - symbolizer.SymbolizeAsync(addr, cb_arg) - symbolizer.WaitForIdle() - - # Test symbols with no name or sym info. 
- addr = _UNKNOWN_MOCK_ADDR - exp_name = None - exp_source_path = None - exp_source_line = None - cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False) - symbolizer.SymbolizeAsync(addr, cb_arg) - symbolizer.Join() - - def _RunTest(self, max_concurrent_jobs, num_symbols): - symbolizer = elf_symbolizer.ELFSymbolizer( - elf_file_path='/path/doesnt/matter/mock_lib1.so', - addr2line_path=_MOCK_A2L_PATH, - callback=self._callback, - max_concurrent_jobs=max_concurrent_jobs, - addr2line_timeout=0.5) - - for addr in xrange(num_symbols): - exp_name = 'mock_sym_for_addr_%d' % addr - exp_source_path = 'mock_src/mock_lib1.so.c' - exp_source_line = addr - cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False) - symbolizer.SymbolizeAsync(addr, cb_arg) - - symbolizer.Join() - - # Check that all the expected callbacks have been received. - for addr in xrange(num_symbols): - self.assertIn(addr, self._resolved_addresses) - self._resolved_addresses.remove(addr) - - # Check for unexpected callbacks. - self.assertEqual(len(self._resolved_addresses), 0) - - def _SymbolizeCallback(self, sym_info, cb_arg): - self.assertTrue(isinstance(sym_info, elf_symbolizer.ELFSymbolInfo)) - self.assertTrue(isinstance(cb_arg, tuple)) - self.assertEqual(len(cb_arg), 5) - - # Unpack expectations from the callback extra argument. - (addr, exp_name, exp_source_path, exp_source_line, exp_inlines) = cb_arg - if exp_name is None: - self.assertIsNone(sym_info.name) - else: - self.assertTrue(sym_info.name.startswith(exp_name)) - self.assertEqual(sym_info.source_path, exp_source_path) - self.assertEqual(sym_info.source_line, exp_source_line) - - if exp_inlines: - self.assertEqual(sym_info.name, exp_name + '_inner') - self.assertEqual(sym_info.inlined_by.name, exp_name + '_middle') - self.assertEqual(sym_info.inlined_by.inlined_by.name, - exp_name + '_outer') - - # Check against duplicate callbacks. - self.assertNotIn(addr, self._resolved_addresses) - self._resolved_addresses.add(addr) - - -if __name__ == '__main__': - unittest.main() diff --git a/build/android/pylib/symbols/expensive_line_transformer.py b/build/android/pylib/symbols/expensive_line_transformer.py new file mode 100644 index 000000000000..08cbe52a673e --- /dev/null +++ b/build/android/pylib/symbols/expensive_line_transformer.py @@ -0,0 +1,233 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +from abc import ABC, abstractmethod +import logging +import subprocess +import threading +import time +import uuid + +from devil.utils import reraiser_thread + + +class ExpensiveLineTransformer(ABC): + def __init__(self, process_start_timeout, minimum_timeout, per_line_timeout): + self._process_start_timeout = process_start_timeout + self._minimum_timeout = minimum_timeout + self._per_line_timeout = per_line_timeout + self._started = False + # Allow only one thread to call TransformLines() at a time. + self._lock = threading.Lock() + # Ensure that only one thread attempts to kill self._proc in Close(). + self._close_lock = threading.Lock() + self._closed_called = False + # Assign to None so that attribute exists if Popen() throws. + self._proc = None + # Start process eagerly to hide start-up latency. + self._proc_start_time = None + + def start(self): + # delay the start of the process, to allow the initialization of the + # descendant classes first. 
+ if self._started: + logging.error('%s: Trying to start an already started command', self.name) + return + + # Start process eagerly to hide start-up latency. + self._proc_start_time = time.time() + + if not self.command: + logging.error('%s: No command available', self.name) + return + + self._proc = subprocess.Popen(self.command, + bufsize=1, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + universal_newlines=True, + close_fds=True) + self._started = True + + def IsClosed(self): + return (not self._started or self._closed_called + or self._proc.returncode is not None) + + def IsBusy(self): + return self._lock.locked() + + def IsReady(self): + return self._started and not self.IsClosed() and not self.IsBusy() + + def TransformLines(self, lines): + """Symbolizes names found in the given lines. + + If anything goes wrong (process crashes, timeout, etc), returns |lines|. + + Args: + lines: A list of strings without trailing newlines. + + Returns: + A list of strings without trailing newlines. + """ + if not lines: + return [] + + # symbolized output contain more lines than the input, as the symbolized + # stacktraces will be added. To account for the extra output lines, keep + # reading until this eof_line token is reached. Using a format that will + # be considered a "useful line" without modifying its output by + # third_party/android_platform/development/scripts/stack_core.py + eof_line = self.getEofLine() + out_lines = [] + + def _reader(): + while True: + line = self._proc.stdout.readline() + # Return an empty string at EOF (when stdin is closed). + if not line: + break + line = line[:-1] + if line == eof_line: + break + out_lines.append(line) + + if self.IsBusy(): + logging.warning('%s: Having to wait for transformation.', self.name) + + # Allow only one thread to operate at a time. + with self._lock: + if self.IsClosed(): + if self._started and not self._closed_called: + logging.warning('%s: Process exited with code=%d.', self.name, + self._proc.returncode) + self.Close() + return lines + + reader_thread = reraiser_thread.ReraiserThread(_reader) + reader_thread.start() + + try: + self._proc.stdin.write('\n'.join(lines)) + self._proc.stdin.write('\n{}\n'.format(eof_line)) + self._proc.stdin.flush() + time_since_proc_start = time.time() - self._proc_start_time + timeout = (max(0, self._process_start_timeout - time_since_proc_start) + + max(self._minimum_timeout, + len(lines) * self._per_line_timeout)) + reader_thread.join(timeout) + if self.IsClosed(): + logging.warning('%s: Close() called by another thread during join().', + self.name) + return lines + if reader_thread.is_alive(): + logging.error('%s: Timed out after %f seconds with input:', self.name, + timeout) + for l in lines: + logging.error(l) + logging.error(eof_line) + logging.error('%s: End of timed out input.', self.name) + logging.error('%s: Timed out output was:', self.name) + for l in out_lines: + logging.error(l) + logging.error('%s: End of timed out output.', self.name) + self.Close() + return lines + return out_lines + except IOError: + logging.exception('%s: Exception during transformation', self.name) + self.Close() + return lines + + def Close(self): + with self._close_lock: + needs_closing = not self.IsClosed() + self._closed_called = True + + if needs_closing: + self._proc.stdin.close() + self._proc.kill() + self._proc.wait() + + def __del__(self): + # self._proc is None when Popen() fails. 
+ if not self._closed_called and self._proc: + logging.error('%s: Forgot to Close()', self.name) + self.Close() + + @property + @abstractmethod + def name(self): + ... + + @property + @abstractmethod + def command(self): + ... + + @staticmethod + def getEofLine(): + # Use a format that will be considered a "useful line" without modifying its + # output by third_party/android_platform/development/scripts/stack_core.py + return "Generic useful log header: \'{}\'".format(uuid.uuid4().hex) + + +class ExpensiveLineTransformerPool(ABC): + def __init__(self, max_restarts, pool_size, passthrough_on_failure): + self._max_restarts = max_restarts + self._pool = [self.CreateTransformer() for _ in range(pool_size)] + self._passthrough_on_failure = passthrough_on_failure + # Allow only one thread to select from the pool at a time. + self._lock = threading.Lock() + self._num_restarts = 0 + + def __enter__(self): + pass + + def __exit__(self, *args): + self.Close() + + def TransformLines(self, lines): + with self._lock: + assert self._pool, 'TransformLines() called on a closed Pool.' + + # transformation is broken. + if self._num_restarts == self._max_restarts: + if self._passthrough_on_failure: + return lines + raise Exception('%s is broken.' % self.name) + + # Restart any closed transformer. + for i, d in enumerate(self._pool): + if d.IsClosed(): + logging.warning('%s: Restarting closed instance.', self.name) + self._pool[i] = self.CreateTransformer() + self._num_restarts += 1 + if self._num_restarts == self._max_restarts: + logging.warning('%s: MAX_RESTARTS reached.', self.name) + if self._passthrough_on_failure: + return lines + raise Exception('%s is broken.' % self.name) + + selected = next((x for x in self._pool if x.IsReady()), self._pool[0]) + # Rotate the order so that next caller will not choose the same one. + self._pool.remove(selected) + self._pool.append(selected) + + return selected.TransformLines(lines) + + def Close(self): + with self._lock: + for d in self._pool: + d.Close() + self._pool = None + + @abstractmethod + def CreateTransformer(self): + ... + + @property + @abstractmethod + def name(self): + ... diff --git a/build/android/pylib/symbols/mock_addr2line/mock_addr2line b/build/android/pylib/symbols/mock_addr2line/mock_addr2line index 8b2a72375d76..431f387f8f7c 100755 --- a/build/android/pylib/symbols/mock_addr2line/mock_addr2line +++ b/build/android/pylib/symbols/mock_addr2line/mock_addr2line @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2014 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,7 +9,6 @@ Outputs mock symbol information, with each symbol being a function of the original address (so it is easy to double-check consistency in unittests). """ -from __future__ import print_function import optparse import os diff --git a/build/android/pylib/symbols/stack_symbolizer.py b/build/android/pylib/symbols/stack_symbolizer.py index 417374149da5..e3203bfca501 100644 --- a/build/android/pylib/symbols/stack_symbolizer.py +++ b/build/android/pylib/symbols/stack_symbolizer.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
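The new base class reduces each tool wrapper to three obligations: set a command, expose the name and command properties, and call start(); TransformLines() then pipes lines into the child process and collects replies until the getEofLine() sentinel round-trips. A minimal hypothetical subclass, not part of this patch, wrapping plain cat (which echoes stdin unchanged) illustrates the contract; the import path assumes build/android/pylib is importable:

    from pylib.symbols.expensive_line_transformer import ExpensiveLineTransformer

    class CatLineTransformer(ExpensiveLineTransformer):
      """No-op transformer: the wrapped process echoes every line back."""

      def __init__(self):
        super().__init__(process_start_timeout=5.0,
                         minimum_timeout=1.0,
                         per_line_timeout=.005)
        self._command = ['cat']  # Echoes stdin, including the eof sentinel.
        self.start()

      @property
      def name(self):
        return 'cat-transformer'

      @property
      def command(self):
        return self._command

    t = CatLineTransformer()
    assert t.TransformLines(['line one', 'line two']) == ['line one', 'line two']
    t.Close()

Because cat echoes the sentinel line back verbatim, the reader thread terminates and TransformLines() returns the input unchanged; a real subclass, like the Deobfuscator above, instead wraps a process that rewrites each line.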
@@ -10,10 +10,18 @@ from devil.utils import cmd_helper
 from pylib import constants
-
-_STACK_TOOL = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..',
-                           'third_party', 'android_platform', 'development',
-                           'scripts', 'stack')
+from pylib.constants import host_paths
+from .expensive_line_transformer import ExpensiveLineTransformer
+from .expensive_line_transformer import ExpensiveLineTransformerPool
+
+_STACK_TOOL = os.path.join(host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH,
+                           'stack')
+_MINIMUM_TIMEOUT = 10.0
+_PER_LINE_TIMEOUT = .005  # Should be able to process 200 lines per second.
+_PROCESS_START_TIMEOUT = 20.0
+_MAX_RESTARTS = 4  # Should be plenty unless the tool crashes on start-up.
+_POOL_SIZE = 1
+_PASSTHROUGH_ON_FAILURE = True
 
 ABI_REG = re.compile('ABI: \'(.+?)\'')
 
@@ -27,7 +35,7 @@ def _DeviceAbiToArch(device_abi):
   raise RuntimeError('Unknown device ABI: %s' % device_abi)
 
 
-class Symbolizer(object):
+class Symbolizer:
   """A helper class to symbolize stack."""
 
   def __init__(self, apk_under_test=None):
@@ -72,7 +80,7 @@ def ExtractAndResolveNativeStackTraces(self, data_to_symbolize,
            constants.GetOutDirectory(), '--more-info']
     env = dict(os.environ)
     env['PYTHONDONTWRITEBYTECODE'] = '1'
-    with tempfile.NamedTemporaryFile() as f:
+    with tempfile.NamedTemporaryFile(mode='w') as f:
       f.write('\n'.join(data_to_symbolize))
       f.flush()
       start = time.time()
@@ -84,3 +92,46 @@
       if not include_stack and 'Stack Data:' in line:
         break
       yield line
+
+
+class PassThroughSymbolizer(ExpensiveLineTransformer):
+  def __init__(self, device_abi):
+    self._command = None
+    super().__init__(_PROCESS_START_TIMEOUT, _MINIMUM_TIMEOUT,
+                     _PER_LINE_TIMEOUT)
+    if not os.path.exists(_STACK_TOOL):
+      logging.warning('%s: %s missing. Unable to resolve native stack traces.',
+                      self.name, _STACK_TOOL)
+      return
+    arch = _DeviceAbiToArch(device_abi)
+    if not arch:
+      logging.warning('%s: No device_abi can be found.', self.name)
+      return
+    self._command = [
+        _STACK_TOOL, '--arch', arch, '--output-directory',
+        constants.GetOutDirectory(), '--more-info', '--pass-through', '--flush',
+        '--quiet', '-'
+    ]
+    self.start()
+
+  @property
+  def name(self):
+    return 'symbolizer'
+
+  @property
+  def command(self):
+    return self._command
+
+
+class PassThroughSymbolizerPool(ExpensiveLineTransformerPool):
+  def __init__(self, device_abi):
+    self._device_abi = device_abi
+    super().__init__(_MAX_RESTARTS, _POOL_SIZE, _PASSTHROUGH_ON_FAILURE)
+
+  def CreateTransformer(self):
+    return PassThroughSymbolizer(self._device_abi)
+
+  @property
+  def name(self):
+    return 'symbolizer-pool'
diff --git a/build/android/pylib/symbols/symbol_utils.py b/build/android/pylib/symbols/symbol_utils.py
deleted file mode 100644
index dea3c63cafd8..000000000000
--- a/build/android/pylib/symbols/symbol_utils.py
+++ /dev/null
@@ -1,814 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
- -from __future__ import print_function - -import bisect -import collections -import logging -import os -import re - -from pylib.constants import host_paths -from pylib.symbols import elf_symbolizer - - -def _AndroidAbiToCpuArch(android_abi): - """Return the Chromium CPU architecture name for a given Android ABI.""" - _ARCH_MAP = { - 'armeabi': 'arm', - 'armeabi-v7a': 'arm', - 'arm64-v8a': 'arm64', - 'x86_64': 'x64', - } - return _ARCH_MAP.get(android_abi, android_abi) - - -def _HexAddressRegexpFor(android_abi): - """Return a regexp matching hexadecimal addresses for a given Android ABI.""" - if android_abi in ['x86_64', 'arm64-v8a', 'mips64']: - width = 16 - else: - width = 8 - return '[0-9a-f]{%d}' % width - - -class HostLibraryFinder(object): - """Translate device library path to matching host unstripped library path. - - Usage is the following: - 1) Create instance. - 2) Call AddSearchDir() once or more times to add host directory path to - look for unstripped native libraries. - 3) Call Find(device_libpath) repeatedly to translate a device-specific - library path into the corresponding host path to the unstripped - version. - """ - def __init__(self): - """Initialize instance.""" - self._search_dirs = [] - self._lib_map = {} # Map of library name to host file paths. - - def AddSearchDir(self, lib_dir): - """Add a directory to the search path for host native shared libraries. - - Args: - lib_dir: host path containing native libraries. - """ - if not os.path.exists(lib_dir): - logging.warning('Ignoring missing host library directory: %s', lib_dir) - return - if not os.path.isdir(lib_dir): - logging.warning('Ignoring invalid host library directory: %s', lib_dir) - return - self._search_dirs.append(lib_dir) - self._lib_map = {} # Reset the map. - - def Find(self, device_libpath): - """Find the host file path matching a specific device library path. - - Args: - device_libpath: device-specific file path to library or executable. - Returns: - host file path to the unstripped version of the library, or None. - """ - host_lib_path = None - lib_name = os.path.basename(device_libpath) - host_lib_path = self._lib_map.get(lib_name) - if not host_lib_path: - for search_dir in self._search_dirs: - lib_path = os.path.join(search_dir, lib_name) - if os.path.exists(lib_path): - host_lib_path = lib_path - break - - if not host_lib_path: - logging.debug('Could not find host library for: %s', lib_name) - self._lib_map[lib_name] = host_lib_path - - return host_lib_path - - - -class SymbolResolver(object): - """A base class for objets that can symbolize library (path, offset) - pairs into symbol information strings. Usage is the following: - - 1) Create new instance (by calling the constructor of a derived - class, since this is only the base one). - - 2) Call SetAndroidAbi() before any call to FindSymbolInfo() in order - to set the Android CPU ABI used for symbolization. - - 3) Before the first call to FindSymbolInfo(), one can call - AddLibraryOffset(), or AddLibraryOffsets() to record a set of offsets - that you will want to symbolize later through FindSymbolInfo(). Doing - so allows some SymbolResolver derived classes to work faster (e.g. the - one that invokes the 'addr2line' program, since the latter works faster - if the offsets provided as inputs are sorted in increasing order). - - 3) Call FindSymbolInfo(path, offset) to return the corresponding - symbol information string, or None if this doesn't correspond - to anything the instance can handle. 
- - Note that whether the path is specific to the device or to the - host depends on the derived class implementation. - """ - def __init__(self): - self._android_abi = None - self._lib_offsets_map = collections.defaultdict(set) - - def SetAndroidAbi(self, android_abi): - """Set the Android ABI value for this instance. - - Calling this function before FindSymbolInfo() is required by some - derived class implementations. - - Args: - android_abi: Native Android CPU ABI name (e.g. 'armeabi-v7a'). - Raises: - Exception if the ABI was already set with a different value. - """ - if self._android_abi and self._android_abi != android_abi: - raise Exception('Cannot reset Android ABI to new value %s, already set ' - 'to %s' % (android_abi, self._android_abi)) - - self._android_abi = android_abi - - def AddLibraryOffset(self, lib_path, offset): - """Associate a single offset to a given device library. - - This must be called before FindSymbolInfo(), otherwise its input arguments - will be ignored. - - Args: - lib_path: A library path. - offset: An integer offset within the corresponding library that will be - symbolized by future calls to FindSymbolInfo. - """ - self._lib_offsets_map[lib_path].add(offset) - - def AddLibraryOffsets(self, lib_path, lib_offsets): - """Associate a set of wanted offsets to a given device library. - - This must be called before FindSymbolInfo(), otherwise its input arguments - will be ignored. - - Args: - lib_path: A library path. - lib_offsets: An iterable of integer offsets within the corresponding - library that will be symbolized by future calls to FindSymbolInfo. - """ - self._lib_offsets_map[lib_path].update(lib_offsets) - - # pylint: disable=unused-argument,no-self-use - def FindSymbolInfo(self, lib_path, lib_offset): - """Symbolize a device library path and offset. - - Args: - lib_path: Library path (device or host specific, depending on the - derived class implementation). - lib_offset: Integer offset within the library. - Returns: - Corresponding symbol information string, or None. - """ - # The base implementation cannot symbolize anything. - return None - # pylint: enable=unused-argument,no-self-use - - -class ElfSymbolResolver(SymbolResolver): - """A SymbolResolver that can symbolize host path + offset values using - an elf_symbolizer.ELFSymbolizer instance. - """ - def __init__(self, addr2line_path_for_tests=None): - super(ElfSymbolResolver, self).__init__() - self._addr2line_path = addr2line_path_for_tests - - # Used to cache one ELFSymbolizer instance per library path. - self._elf_symbolizer_cache = {} - - # Used to cache FindSymbolInfo() results. Maps host library paths - # to (offset -> symbol info string) dictionaries. - self._symbol_info_cache = collections.defaultdict(dict) - self._allow_symbolizer = True - - def _CreateSymbolizerFor(self, host_path): - """Create the ELFSymbolizer instance associated with a given lib path.""" - addr2line_path = self._addr2line_path - if not addr2line_path: - if not self._android_abi: - raise Exception( - 'Android CPU ABI must be set before calling FindSymbolInfo!') - - cpu_arch = _AndroidAbiToCpuArch(self._android_abi) - self._addr2line_path = host_paths.ToolPath('addr2line', cpu_arch) - - return elf_symbolizer.ELFSymbolizer( - elf_file_path=host_path, addr2line_path=self._addr2line_path, - callback=ElfSymbolResolver._Callback, inlines=True) - - def DisallowSymbolizerForTesting(self): - """Disallow FindSymbolInfo() from using a symbolizer. 
- - This is used during unit-testing to ensure that the offsets that were - recorded via AddLibraryOffset()/AddLibraryOffsets() are properly - symbolized, but not anything else. - """ - self._allow_symbolizer = False - - def FindSymbolInfo(self, host_path, offset): - """Override SymbolResolver.FindSymbolInfo. - - Args: - host_path: Host-specific path to the native shared library. - offset: Integer offset within the native library. - Returns: - A symbol info string, or None. - """ - offset_map = self._symbol_info_cache[host_path] - symbol_info = offset_map.get(offset) - if symbol_info: - return symbol_info - - # Create symbolizer on demand. - symbolizer = self._elf_symbolizer_cache.get(host_path) - if not symbolizer: - symbolizer = self._CreateSymbolizerFor(host_path) - self._elf_symbolizer_cache[host_path] = symbolizer - - # If there are pre-recorded offsets for this path, symbolize them now. - offsets = self._lib_offsets_map.get(host_path) - if offsets: - offset_map = {} - for pre_offset in offsets: - symbolizer.SymbolizeAsync( - pre_offset, callback_arg=(offset_map, pre_offset)) - symbolizer.WaitForIdle() - self._symbol_info_cache[host_path] = offset_map - - symbol_info = offset_map.get(offset) - if symbol_info: - return symbol_info - - if not self._allow_symbolizer: - return None - - # Symbolize single offset. Slower if addresses are not provided in - # increasing order to addr2line. - symbolizer.SymbolizeAsync(offset, - callback_arg=(offset_map, offset)) - symbolizer.WaitForIdle() - return offset_map.get(offset) - - @staticmethod - def _Callback(sym_info, callback_arg): - offset_map, offset = callback_arg - offset_map[offset] = str(sym_info) - - -class DeviceSymbolResolver(SymbolResolver): - """A SymbolResolver instance that accepts device-specific path. - - Usage is the following: - 1) Create new instance, passing a parent SymbolResolver instance that - accepts host-specific paths, and a HostLibraryFinder instance. - - 2) Optional: call AddApkOffsets() to add offsets from within an APK - that contains uncompressed native shared libraries. - - 3) Use it as any SymbolResolver instance. - """ - def __init__(self, host_resolver, host_lib_finder): - """Initialize instance. - - Args: - host_resolver: A parent SymbolResolver instance that will be used - to resolve symbols from host library paths. - host_lib_finder: A HostLibraryFinder instance used to locate - unstripped libraries on the host. - """ - super(DeviceSymbolResolver, self).__init__() - self._host_lib_finder = host_lib_finder - self._bad_device_lib_paths = set() - self._host_resolver = host_resolver - - def SetAndroidAbi(self, android_abi): - super(DeviceSymbolResolver, self).SetAndroidAbi(android_abi) - self._host_resolver.SetAndroidAbi(android_abi) - - def AddLibraryOffsets(self, device_lib_path, lib_offsets): - """Associate a set of wanted offsets to a given device library. - - This must be called before FindSymbolInfo(), otherwise its input arguments - will be ignored. - - Args: - device_lib_path: A device-specific library path. - lib_offsets: An iterable of integer offsets within the corresponding - library that will be symbolized by future calls to FindSymbolInfo. - want to symbolize. - """ - if device_lib_path in self._bad_device_lib_paths: - return - - host_lib_path = self._host_lib_finder.Find(device_lib_path) - if not host_lib_path: - # NOTE: self._bad_device_lib_paths is only used to only print this - # warning once per bad library. 
- logging.warning('Could not find host library matching device path: %s', - device_lib_path) - self._bad_device_lib_paths.add(device_lib_path) - return - - self._host_resolver.AddLibraryOffsets(host_lib_path, lib_offsets) - - def AddApkOffsets(self, device_apk_path, apk_offsets, apk_translator): - """Associate a set of wanted offsets to a given device APK path. - - This converts the APK-relative offsets into offsets relative to the - uncompressed libraries it contains, then calls AddLibraryOffsets() - for each one of the libraries. - - Must be called before FindSymbolInfo() as well, otherwise input arguments - will be ignored. - - Args: - device_apk_path: Device-specific APK path. - apk_offsets: Iterable of offsets within the APK file. - apk_translator: An ApkLibraryPathTranslator instance used to extract - library paths from the APK. - """ - libraries_map = collections.defaultdict(set) - for offset in apk_offsets: - lib_path, lib_offset = apk_translator.TranslatePath(device_apk_path, - offset) - libraries_map[lib_path].add(lib_offset) - - for lib_path, lib_offsets in libraries_map.iteritems(): - self.AddLibraryOffsets(lib_path, lib_offsets) - - def FindSymbolInfo(self, device_path, offset): - """Overrides SymbolResolver.FindSymbolInfo. - - Args: - device_path: Device-specific library path (e.g. - '/data/app/com.example.app-1/lib/x86/libfoo.so') - offset: Offset in device library path. - Returns: - Corresponding symbol information string, or None. - """ - host_path = self._host_lib_finder.Find(device_path) - if not host_path: - return None - - return self._host_resolver.FindSymbolInfo(host_path, offset) - - -class MemoryMap(object): - """Models the memory map of a given process. Usage is: - - 1) Create new instance, passing the Android ABI. - - 2) Call TranslateLine() whenever you want to detect and translate any - memory map input line. - - 3) Otherwise, it is possible to parse the whole memory map input with - ParseLines(), then call FindSectionForAddress() repeatedly in order - to translate a memory address into the corresponding mapping and - file information tuple (e.g. to symbolize stack entries). - """ - - # A named tuple describing interesting memory map line items. - # Fields: - # addr_start: Mapping start address in memory. - # file_offset: Corresponding file offset. - # file_size: Corresponding mapping size in bytes. - # file_path: Input file path. - # match: Corresponding regular expression match object. - LineTuple = collections.namedtuple('MemoryMapLineTuple', - 'addr_start,file_offset,file_size,' - 'file_path, match') - - # A name tuple describing a memory map section. - # Fields: - # address: Memory address. - # size: Size in bytes in memory - # offset: Starting file offset. - # path: Input file path. - SectionTuple = collections.namedtuple('MemoryMapSection', - 'address,size,offset,path') - - def __init__(self, android_abi): - """Initializes instance. - - Args: - android_abi: Android CPU ABI name (e.g. 'armeabi-v7a') - """ - hex_addr = _HexAddressRegexpFor(android_abi) - - # pylint: disable=line-too-long - # A regular expression used to match memory map entries which look like: - # b278c000-b2790fff r-- 4fda000 5000 /data/app/com.google.android.apps.chrome-2/base.apk - # pylint: enable=line-too-long - self._re_map_section = re.compile( - r'\s*(?P' + hex_addr + r')-(?P' + hex_addr + ')' + - r'\s+' + - r'(?P...)\s+' + - r'(?P[0-9a-f]+)\s+' + - r'(?P[0-9a-f]+)\s*' + - r'(?P[^ \t]+)?') - - self._addr_map = [] # Sorted list of (address, size, path, offset) tuples. 
- self._sorted_addresses = [] # Sorted list of address fields in _addr_map. - self._in_section = False - - def TranslateLine(self, line, apk_path_translator): - """Try to translate a memory map input line, if detected. - - This only takes care of converting mapped APK file path and offsets - into a corresponding uncompressed native library file path + new offsets, - e.g. '..... /data/.../base.apk' gets - translated into '.... /data/.../base.apk!lib/libfoo.so' - - This function should always work, even if ParseLines() was not called - previously. - - Args: - line: Input memory map / tombstone line. - apk_translator: An ApkLibraryPathTranslator instance, used to map - APK offsets into uncompressed native libraries + new offsets. - Returns: - Translated memory map line, if relevant, or unchanged input line - otherwise. - """ - t = self._ParseLine(line.rstrip()) - if not t: - return line - - new_path, new_offset = apk_path_translator.TranslatePath( - t.file_path, t.file_offset) - - if new_path == t.file_path: - return line - - pos = t.match.start('file_path') - return '%s%s (offset 0x%x)%s' % (line[0:pos], new_path, new_offset, - line[t.match.end('file_path'):]) - - def ParseLines(self, input_lines, in_section=False): - """Parse a list of input lines and extract the APK memory map out of it. - - Args: - input_lines: list, or iterable, of input lines. - in_section: Optional. If true, considers that the input lines are - already part of the memory map. Otherwise, wait until the start of - the section appears in the input before trying to record data. - Returns: - True iff APK-related memory map entries were found. False otherwise. - """ - addr_list = [] # list of (address, size, file_path, file_offset) tuples. - self._in_section = in_section - for line in input_lines: - t = self._ParseLine(line.rstrip()) - if not t: - continue - - addr_list.append(t) - - self._addr_map = sorted(addr_list, - lambda x, y: cmp(x.addr_start, y.addr_start)) - self._sorted_addresses = [e.addr_start for e in self._addr_map] - return bool(self._addr_map) - - def _ParseLine(self, line): - """Used internally to recognized memory map input lines. - - Args: - line: Input logcat or tomstone line. - Returns: - A LineTuple instance on success, or None on failure. - """ - if not self._in_section: - self._in_section = line.startswith('memory map:') - return None - - m = self._re_map_section.match(line) - if not m: - self._in_section = False # End of memory map section - return None - - # Only accept .apk and .so files that are not from the system partitions. - file_path = m.group('file_path') - if not file_path: - return None - - if file_path.startswith('/system') or file_path.startswith('/vendor'): - return None - - if not (file_path.endswith('.apk') or file_path.endswith('.so')): - return None - - addr_start = int(m.group('addr_start'), 16) - file_offset = int(m.group('file_offset'), 16) - file_size = int(m.group('file_size'), 16) - - return self.LineTuple(addr_start, file_offset, file_size, file_path, m) - - def Dump(self): - """Print memory map for debugging.""" - print('MEMORY MAP [') - for t in self._addr_map: - print('[%08x-%08x %08x %08x %s]' % - (t.addr_start, t.addr_start + t.file_size, t.file_size, - t.file_offset, t.file_path)) - print('] MEMORY MAP') - - def FindSectionForAddress(self, addr): - """Find the map section corresponding to a specific memory address. - - Call this method only after using ParseLines() was called to extract - relevant information from the memory map. 
- - Args: - addr: Memory address - Returns: - A SectionTuple instance on success, or None on failure. - """ - pos = bisect.bisect_right(self._sorted_addresses, addr) - if pos > 0: - # All values in [0,pos) are <= addr, just ensure that the last - # one contains the address as well. - entry = self._addr_map[pos - 1] - if entry.addr_start + entry.file_size > addr: - return self.SectionTuple(entry.addr_start, entry.file_size, - entry.file_offset, entry.file_path) - return None - - -class BacktraceTranslator(object): - """Translates backtrace-related lines in a tombstone or crash report. - - Usage is the following: - 1) Create new instance with appropriate arguments. - 2) If the tombstone / logcat input is available, one can call - FindLibraryOffsets() in order to detect which library offsets - will need to be symbolized during a future parse. Doing so helps - speed up the ELF symbolizer. - 3) For each tombstone/logcat input line, call TranslateLine() to - try to detect and symbolize backtrace lines. - """ - - # A named tuple for relevant input backtrace lines. - # Fields: - # rel_pc: Instruction pointer, relative to offset in library start. - # location: Library or APK file path. - # offset: Load base of executable code in library or apk file path. - # match: The corresponding regular expression match object. - # Note: - # The actual instruction pointer always matches the position at - # |offset + rel_pc| in |location|. - LineTuple = collections.namedtuple('BacktraceLineTuple', - 'rel_pc,location,offset,match') - - def __init__(self, android_abi, apk_translator): - """Initialize instance. - - Args: - android_abi: Android CPU ABI name (e.g. 'armeabi-v7a'). - apk_translator: ApkLibraryPathTranslator instance used to convert - mapped APK file offsets into uncompressed library file paths with - new offsets. - """ - hex_addr = _HexAddressRegexpFor(android_abi) - - # A regular expression used to match backtrace lines. - self._re_backtrace = re.compile( - r'.*#(?P[0-9]{2})\s+' + - r'(..)\s+' + - r'(?P' + hex_addr + r')\s+' + - r'(?P[^ \t]+)' + - r'(\s+\(offset 0x(?P[0-9a-f]+)\))?') - - # In certain cases, offset will be provided as +0x - # instead of (offset 0x). This is a regexp to detect - # this. - self._re_location_offset = re.compile( - r'.*\+0x(?P[0-9a-f]+)$') - - self._apk_translator = apk_translator - self._in_section = False - - def _ParseLine(self, line): - """Used internally to detect and decompose backtrace input lines. - - Args: - line: input tombstone line. - Returns: - A LineTuple instance on success, None on failure. - """ - if not self._in_section: - self._in_section = line.startswith('backtrace:') - return None - - line = line.rstrip() - m = self._re_backtrace.match(line) - if not m: - self._in_section = False - return None - - location = m.group('location') - offset = m.group('offset') - if not offset: - m2 = self._re_location_offset.match(location) - if m2: - offset = m2.group('offset') - location = location[0:m2.start('offset') - 3] - - if not offset: - return None - - offset = int(offset, 16) - rel_pc = int(m.group('rel_pc'), 16) - - # Two cases to consider here: - # - # * If this is a library file directly mapped in memory, then |rel_pc| - # if the direct offset within the library, and doesn't need any kind - # of adjustement. - # - # * If this is a library mapped directly from an .apk file, then - # |rel_pc| is the offset in the APK, and |offset| happens to be the - # load base of the corresponding library. 
- # - if location.endswith('.so'): - # For a native library directly mapped from the file system, - return self.LineTuple(rel_pc, location, offset, m) - - if location.endswith('.apk'): - # For a native library inside an memory-mapped APK file, - new_location, new_offset = self._apk_translator.TranslatePath( - location, offset) - - return self.LineTuple(rel_pc, new_location, new_offset, m) - - # Ignore anything else (e.g. .oat or .odex files). - return None - - def FindLibraryOffsets(self, input_lines, in_section=False): - """Parse a tombstone's backtrace section and find all library offsets in it. - - Args: - input_lines: List or iterables of intput tombstone lines. - in_section: Optional. If True, considers that the stack section has - already started. - Returns: - A dictionary mapping device library paths to sets of offsets within - then. - """ - self._in_section = in_section - result = collections.defaultdict(set) - for line in input_lines: - t = self._ParseLine(line) - if not t: - continue - - result[t.location].add(t.offset + t.rel_pc) - return result - - def TranslateLine(self, line, symbol_resolver): - """Symbolize backtrace line if recognized. - - Args: - line: input backtrace line. - symbol_resolver: symbol resolver instance to use. This method will - call its FindSymbolInfo(device_lib_path, lib_offset) method to - convert offsets into symbol informations strings. - Returns: - Translated line (unchanged if not recognized as a back trace). - """ - t = self._ParseLine(line) - if not t: - return line - - symbol_info = symbol_resolver.FindSymbolInfo(t.location, - t.offset + t.rel_pc) - if not symbol_info: - symbol_info = 'offset 0x%x' % t.offset - - pos = t.match.start('location') - pos2 = t.match.end('offset') + 1 - if pos2 <= 0: - pos2 = t.match.end('location') - return '%s%s (%s)%s' % (line[:pos], t.location, symbol_info, line[pos2:]) - - -class StackTranslator(object): - """Translates stack-related lines in a tombstone or crash report.""" - - # A named tuple describing relevant stack input lines. - # Fields: - # address: Address as it appears in the stack. - # lib_path: Library path where |address| is mapped. - # lib_offset: Library load base offset. for |lib_path|. - # match: Corresponding regular expression match object. - LineTuple = collections.namedtuple('StackLineTuple', - 'address, lib_path, lib_offset, match') - - def __init__(self, android_abi, memory_map, apk_translator): - """Initialize instance.""" - hex_addr = _HexAddressRegexpFor(android_abi) - - # pylint: disable=line-too-long - # A regular expression used to recognize stack entries like: - # - # #05 bf89a180 bf89a1e4 [stack] - # bf89a1c8 a0c01c51 /data/app/com.google.android.apps.chrome-2/base.apk - # bf89a080 00000000 - # ........ ........ - # pylint: enable=line-too-long - self._re_stack_line = re.compile( - r'\s+(?P#[0-9]+)?\s*' + - r'(?P' + hex_addr + r')\s+' + - r'(?P' + hex_addr + r')' + - r'(\s+(?P[^ \t]+))?') - - self._re_stack_abbrev = re.compile(r'\s+[.]+\s+[.]+') - - self._memory_map = memory_map - self._apk_translator = apk_translator - self._in_section = False - - def _ParseLine(self, line): - """Check a given input line for a relevant _re_stack_line match. - - Args: - line: input tombstone line. - Returns: - A LineTuple instance on success, None on failure. 
- """ - line = line.rstrip() - if not self._in_section: - self._in_section = line.startswith('stack:') - return None - - m = self._re_stack_line.match(line) - if not m: - if not self._re_stack_abbrev.match(line): - self._in_section = False - return None - - location = m.group('location') - if not location: - return None - - if not location.endswith('.apk') and not location.endswith('.so'): - return None - - addr = int(m.group('stack_value'), 16) - t = self._memory_map.FindSectionForAddress(addr) - if t is None: - return None - - lib_path = t.path - lib_offset = t.offset + (addr - t.address) - - if lib_path.endswith('.apk'): - lib_path, lib_offset = self._apk_translator.TranslatePath( - lib_path, lib_offset) - - return self.LineTuple(addr, lib_path, lib_offset, m) - - def FindLibraryOffsets(self, input_lines, in_section=False): - """Parse a tombstone's stack section and find all library offsets in it. - - Args: - input_lines: List or iterables of intput tombstone lines. - in_section: Optional. If True, considers that the stack section has - already started. - Returns: - A dictionary mapping device library paths to sets of offsets within - then. - """ - result = collections.defaultdict(set) - self._in_section = in_section - for line in input_lines: - t = self._ParseLine(line) - if t: - result[t.lib_path].add(t.lib_offset) - return result - - def TranslateLine(self, line, symbol_resolver=None): - """Try to translate a line of the stack dump.""" - t = self._ParseLine(line) - if not t: - return line - - symbol_info = symbol_resolver.FindSymbolInfo(t.lib_path, t.lib_offset) - if not symbol_info: - return line - - pos = t.match.start('location') - pos2 = t.match.end('location') - return '%s%s (%s)%s' % (line[:pos], t.lib_path, symbol_info, line[pos2:]) diff --git a/build/android/pylib/symbols/symbol_utils_unittest.py b/build/android/pylib/symbols/symbol_utils_unittest.py deleted file mode 100644 index ed87f9ed770a..000000000000 --- a/build/android/pylib/symbols/symbol_utils_unittest.py +++ /dev/null @@ -1,942 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import collections -import contextlib -import logging -import os -import re -import shutil -import tempfile -import unittest - -from pylib.symbols import apk_native_libs_unittest -from pylib.symbols import mock_addr2line -from pylib.symbols import symbol_utils - -_MOCK_ELF_DATA = apk_native_libs_unittest.MOCK_ELF_DATA - -_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__), - 'mock_addr2line') - - -# pylint: disable=line-too-long - -# list of (start_offset, end_offset, size, libpath) tuples corresponding -# to the content of base.apk. This was taken from an x86 ChromeModern.apk -# component build. 
-_TEST_APK_LIBS = [ - (0x01331000, 0x013696bc, 0x000386bc, 'libaccessibility.cr.so'), - (0x0136a000, 0x013779c4, 0x0000d9c4, 'libanimation.cr.so'), - (0x01378000, 0x0137f7e8, 0x000077e8, 'libapdu.cr.so'), - (0x01380000, 0x0155ccc8, 0x001dccc8, 'libbase.cr.so'), - (0x0155d000, 0x015ab98c, 0x0004e98c, 'libbase_i18n.cr.so'), - (0x015ac000, 0x015dff4c, 0x00033f4c, 'libbindings.cr.so'), - (0x015e0000, 0x015f5a54, 0x00015a54, 'libbindings_base.cr.so'), - (0x0160e000, 0x01731960, 0x00123960, 'libblink_common.cr.so'), - (0x01732000, 0x0174ce54, 0x0001ae54, 'libblink_controller.cr.so'), - (0x0174d000, 0x0318c528, 0x01a3f528, 'libblink_core.cr.so'), - (0x0318d000, 0x03191700, 0x00004700, 'libblink_mojom_broadcastchannel_bindings_shared.cr.so'), - (0x03192000, 0x03cd7918, 0x00b45918, 'libblink_modules.cr.so'), - (0x03cd8000, 0x03d137d0, 0x0003b7d0, 'libblink_mojo_bindings_shared.cr.so'), - (0x03d14000, 0x03d2670c, 0x0001270c, 'libblink_offscreen_canvas_mojo_bindings_shared.cr.so'), - (0x03d27000, 0x046c7054, 0x009a0054, 'libblink_platform.cr.so'), - (0x046c8000, 0x0473fbfc, 0x00077bfc, 'libbluetooth.cr.so'), - (0x04740000, 0x04878f40, 0x00138f40, 'libboringssl.cr.so'), - (0x04879000, 0x0498466c, 0x0010b66c, 'libc++_shared.so'), - (0x04985000, 0x0498d93c, 0x0000893c, 'libcaptive_portal.cr.so'), - (0x0498e000, 0x049947cc, 0x000067cc, 'libcapture_base.cr.so'), - (0x04995000, 0x04b39f18, 0x001a4f18, 'libcapture_lib.cr.so'), - (0x04b3a000, 0x04b488ec, 0x0000e8ec, 'libcbor.cr.so'), - (0x04b49000, 0x04e9ea5c, 0x00355a5c, 'libcc.cr.so'), - (0x04e9f000, 0x04ed6404, 0x00037404, 'libcc_animation.cr.so'), - (0x04ed7000, 0x04ef5ab4, 0x0001eab4, 'libcc_base.cr.so'), - (0x04ef6000, 0x04fd9364, 0x000e3364, 'libcc_blink.cr.so'), - (0x04fda000, 0x04fe2758, 0x00008758, 'libcc_debug.cr.so'), - (0x04fe3000, 0x0500ae0c, 0x00027e0c, 'libcc_ipc.cr.so'), - (0x0500b000, 0x05078f38, 0x0006df38, 'libcc_paint.cr.so'), - (0x05079000, 0x0507e734, 0x00005734, 'libcdm_manager.cr.so'), - (0x0507f000, 0x06f4d744, 0x01ece744, 'libchrome.cr.so'), - (0x06f54000, 0x06feb830, 0x00097830, 'libchromium_sqlite3.cr.so'), - (0x06fec000, 0x0706f554, 0x00083554, 'libclient.cr.so'), - (0x07070000, 0x0708da60, 0x0001da60, 'libcloud_policy_proto_generated_compile.cr.so'), - (0x0708e000, 0x07121f28, 0x00093f28, 'libcodec.cr.so'), - (0x07122000, 0x07134ab8, 0x00012ab8, 'libcolor_space.cr.so'), - (0x07135000, 0x07138614, 0x00003614, 'libcommon.cr.so'), - (0x07139000, 0x0717c938, 0x00043938, 'libcompositor.cr.so'), - (0x0717d000, 0x0923d78c, 0x020c078c, 'libcontent.cr.so'), - (0x0923e000, 0x092ae87c, 0x0007087c, 'libcontent_common_mojo_bindings_shared.cr.so'), - (0x092af000, 0x092be718, 0x0000f718, 'libcontent_public_common_mojo_bindings_shared.cr.so'), - (0x092bf000, 0x092d9a20, 0x0001aa20, 'libcrash_key.cr.so'), - (0x092da000, 0x092eda58, 0x00013a58, 'libcrcrypto.cr.so'), - (0x092ee000, 0x092f16e0, 0x000036e0, 'libdevice_base.cr.so'), - (0x092f2000, 0x092fe8d8, 0x0000c8d8, 'libdevice_event_log.cr.so'), - (0x092ff000, 0x093026a4, 0x000036a4, 'libdevice_features.cr.so'), - (0x09303000, 0x093f1220, 0x000ee220, 'libdevice_gamepad.cr.so'), - (0x093f2000, 0x09437f54, 0x00045f54, 'libdevice_vr_mojo_bindings.cr.so'), - (0x09438000, 0x0954c168, 0x00114168, 'libdevice_vr_mojo_bindings_blink.cr.so'), - (0x0954d000, 0x0955d720, 0x00010720, 'libdevice_vr_mojo_bindings_shared.cr.so'), - (0x0955e000, 0x0956b9c0, 0x0000d9c0, 'libdevices.cr.so'), - (0x0956c000, 0x0957cae8, 0x00010ae8, 'libdiscardable_memory_client.cr.so'), - (0x0957d000, 0x09588854, 0x0000b854, 
'libdiscardable_memory_common.cr.so'), - (0x09589000, 0x0959cbb4, 0x00013bb4, 'libdiscardable_memory_service.cr.so'), - (0x0959d000, 0x095b6b90, 0x00019b90, 'libdisplay.cr.so'), - (0x095b7000, 0x095be930, 0x00007930, 'libdisplay_types.cr.so'), - (0x095bf000, 0x095c46c4, 0x000056c4, 'libdisplay_util.cr.so'), - (0x095c5000, 0x095f54a4, 0x000304a4, 'libdomain_reliability.cr.so'), - (0x095f6000, 0x0966fe08, 0x00079e08, 'libembedder.cr.so'), - (0x09670000, 0x096735f8, 0x000035f8, 'libembedder_switches.cr.so'), - (0x09674000, 0x096a3460, 0x0002f460, 'libevents.cr.so'), - (0x096a4000, 0x096b6d40, 0x00012d40, 'libevents_base.cr.so'), - (0x096b7000, 0x0981a778, 0x00163778, 'libffmpeg.cr.so'), - (0x0981b000, 0x09945c94, 0x0012ac94, 'libfido.cr.so'), - (0x09946000, 0x09a330dc, 0x000ed0dc, 'libfingerprint.cr.so'), - (0x09a34000, 0x09b53170, 0x0011f170, 'libfreetype_harfbuzz.cr.so'), - (0x09b54000, 0x09bc5c5c, 0x00071c5c, 'libgcm.cr.so'), - (0x09bc6000, 0x09cc8584, 0x00102584, 'libgeolocation.cr.so'), - (0x09cc9000, 0x09cdc8d4, 0x000138d4, 'libgeometry.cr.so'), - (0x09cdd000, 0x09cec8b4, 0x0000f8b4, 'libgeometry_skia.cr.so'), - (0x09ced000, 0x09d10e14, 0x00023e14, 'libgesture_detection.cr.so'), - (0x09d11000, 0x09d7595c, 0x0006495c, 'libgfx.cr.so'), - (0x09d76000, 0x09d7d7cc, 0x000077cc, 'libgfx_ipc.cr.so'), - (0x09d7e000, 0x09d82708, 0x00004708, 'libgfx_ipc_buffer_types.cr.so'), - (0x09d83000, 0x09d89748, 0x00006748, 'libgfx_ipc_color.cr.so'), - (0x09d8a000, 0x09d8f6f4, 0x000056f4, 'libgfx_ipc_geometry.cr.so'), - (0x09d90000, 0x09d94754, 0x00004754, 'libgfx_ipc_skia.cr.so'), - (0x09d95000, 0x09d9869c, 0x0000369c, 'libgfx_switches.cr.so'), - (0x09d99000, 0x09dba0ac, 0x000210ac, 'libgin.cr.so'), - (0x09dbb000, 0x09e0a8cc, 0x0004f8cc, 'libgl_in_process_context.cr.so'), - (0x09e0b000, 0x09e17a18, 0x0000ca18, 'libgl_init.cr.so'), - (0x09e18000, 0x09ee34e4, 0x000cb4e4, 'libgl_wrapper.cr.so'), - (0x09ee4000, 0x0a1a2e00, 0x002bee00, 'libgles2.cr.so'), - (0x0a1a3000, 0x0a24556c, 0x000a256c, 'libgles2_implementation.cr.so'), - (0x0a246000, 0x0a267038, 0x00021038, 'libgles2_utils.cr.so'), - (0x0a268000, 0x0a3288e4, 0x000c08e4, 'libgpu.cr.so'), - (0x0a329000, 0x0a3627ec, 0x000397ec, 'libgpu_ipc_service.cr.so'), - (0x0a363000, 0x0a388a18, 0x00025a18, 'libgpu_util.cr.so'), - (0x0a389000, 0x0a506d8c, 0x0017dd8c, 'libhost.cr.so'), - (0x0a507000, 0x0a6f0ec0, 0x001e9ec0, 'libicui18n.cr.so'), - (0x0a6f1000, 0x0a83b4c8, 0x0014a4c8, 'libicuuc.cr.so'), - (0x0a83c000, 0x0a8416e4, 0x000056e4, 'libinterfaces_shared.cr.so'), - (0x0a842000, 0x0a87e2a0, 0x0003c2a0, 'libipc.cr.so'), - (0x0a87f000, 0x0a88c98c, 0x0000d98c, 'libipc_mojom.cr.so'), - (0x0a88d000, 0x0a8926e4, 0x000056e4, 'libipc_mojom_shared.cr.so'), - (0x0a893000, 0x0a8a1e18, 0x0000ee18, 'libkeyed_service_content.cr.so'), - (0x0a8a2000, 0x0a8b4a30, 0x00012a30, 'libkeyed_service_core.cr.so'), - (0x0a8b5000, 0x0a930a80, 0x0007ba80, 'libleveldatabase.cr.so'), - (0x0a931000, 0x0a9b3908, 0x00082908, 'libmanager.cr.so'), - (0x0a9b4000, 0x0aea9bb4, 0x004f5bb4, 'libmedia.cr.so'), - (0x0aeaa000, 0x0b08cb88, 0x001e2b88, 'libmedia_blink.cr.so'), - (0x0b08d000, 0x0b0a4728, 0x00017728, 'libmedia_devices_mojo_bindings_shared.cr.so'), - (0x0b0a5000, 0x0b1943ec, 0x000ef3ec, 'libmedia_gpu.cr.so'), - (0x0b195000, 0x0b2d07d4, 0x0013b7d4, 'libmedia_mojo_services.cr.so'), - (0x0b2d1000, 0x0b2d4760, 0x00003760, 'libmessage_center.cr.so'), - (0x0b2d5000, 0x0b2e0938, 0x0000b938, 'libmessage_support.cr.so'), - (0x0b2e1000, 0x0b2f3ad0, 0x00012ad0, 'libmetrics_cpp.cr.so'), - (0x0b2f4000, 
0x0b313bb8, 0x0001fbb8, 'libmidi.cr.so'), - (0x0b314000, 0x0b31b848, 0x00007848, 'libmojo_base_lib.cr.so'), - (0x0b31c000, 0x0b3329f8, 0x000169f8, 'libmojo_base_mojom.cr.so'), - (0x0b333000, 0x0b34b98c, 0x0001898c, 'libmojo_base_mojom_blink.cr.so'), - (0x0b34c000, 0x0b354700, 0x00008700, 'libmojo_base_mojom_shared.cr.so'), - (0x0b355000, 0x0b3608b0, 0x0000b8b0, 'libmojo_base_shared_typemap_traits.cr.so'), - (0x0b361000, 0x0b3ad454, 0x0004c454, 'libmojo_edk.cr.so'), - (0x0b3ae000, 0x0b3c4a20, 0x00016a20, 'libmojo_edk_ports.cr.so'), - (0x0b3c5000, 0x0b3d38a0, 0x0000e8a0, 'libmojo_mojom_bindings.cr.so'), - (0x0b3d4000, 0x0b3da6e8, 0x000066e8, 'libmojo_mojom_bindings_shared.cr.so'), - (0x0b3db000, 0x0b3e27f0, 0x000077f0, 'libmojo_public_system.cr.so'), - (0x0b3e3000, 0x0b3fa9fc, 0x000179fc, 'libmojo_public_system_cpp.cr.so'), - (0x0b3fb000, 0x0b407728, 0x0000c728, 'libmojom_core_shared.cr.so'), - (0x0b408000, 0x0b421744, 0x00019744, 'libmojom_platform_shared.cr.so'), - (0x0b422000, 0x0b43451c, 0x0001251c, 'libnative_theme.cr.so'), - (0x0b435000, 0x0baaa1bc, 0x006751bc, 'libnet.cr.so'), - (0x0bac4000, 0x0bb74670, 0x000b0670, 'libnetwork_cpp.cr.so'), - (0x0bb75000, 0x0bbaee8c, 0x00039e8c, 'libnetwork_cpp_base.cr.so'), - (0x0bbaf000, 0x0bd21844, 0x00172844, 'libnetwork_service.cr.so'), - (0x0bd22000, 0x0bd256e4, 0x000036e4, 'libnetwork_session_configurator.cr.so'), - (0x0bd26000, 0x0bd33734, 0x0000d734, 'libonc.cr.so'), - (0x0bd34000, 0x0bd9ce18, 0x00068e18, 'libperfetto.cr.so'), - (0x0bd9d000, 0x0bda4854, 0x00007854, 'libplatform.cr.so'), - (0x0bda5000, 0x0bec5ce4, 0x00120ce4, 'libpolicy_component.cr.so'), - (0x0bec6000, 0x0bf5ab58, 0x00094b58, 'libpolicy_proto.cr.so'), - (0x0bf5b000, 0x0bf86fbc, 0x0002bfbc, 'libprefs.cr.so'), - (0x0bf87000, 0x0bfa5d74, 0x0001ed74, 'libprinting.cr.so'), - (0x0bfa6000, 0x0bfe0e80, 0x0003ae80, 'libprotobuf_lite.cr.so'), - (0x0bfe1000, 0x0bff0a18, 0x0000fa18, 'libproxy_config.cr.so'), - (0x0bff1000, 0x0c0f6654, 0x00105654, 'libpublic.cr.so'), - (0x0c0f7000, 0x0c0fa6a4, 0x000036a4, 'librange.cr.so'), - (0x0c0fb000, 0x0c118058, 0x0001d058, 'libraster.cr.so'), - (0x0c119000, 0x0c133d00, 0x0001ad00, 'libresource_coordinator_cpp.cr.so'), - (0x0c134000, 0x0c1396a0, 0x000056a0, 'libresource_coordinator_cpp_base.cr.so'), - (0x0c13a000, 0x0c1973b8, 0x0005d3b8, 'libresource_coordinator_public_mojom.cr.so'), - (0x0c198000, 0x0c2033e8, 0x0006b3e8, 'libresource_coordinator_public_mojom_blink.cr.so'), - (0x0c204000, 0x0c219744, 0x00015744, 'libresource_coordinator_public_mojom_shared.cr.so'), - (0x0c21a000, 0x0c21e700, 0x00004700, 'libsandbox.cr.so'), - (0x0c21f000, 0x0c22f96c, 0x0001096c, 'libsandbox_services.cr.so'), - (0x0c230000, 0x0c249d58, 0x00019d58, 'libseccomp_bpf.cr.so'), - (0x0c24a000, 0x0c24e714, 0x00004714, 'libseccomp_starter_android.cr.so'), - (0x0c24f000, 0x0c4ae9f0, 0x0025f9f0, 'libservice.cr.so'), - (0x0c4af000, 0x0c4c3ae4, 0x00014ae4, 'libservice_manager_cpp.cr.so'), - (0x0c4c4000, 0x0c4cb708, 0x00007708, 'libservice_manager_cpp_types.cr.so'), - (0x0c4cc000, 0x0c4fbe30, 0x0002fe30, 'libservice_manager_mojom.cr.so'), - (0x0c4fc000, 0x0c532e78, 0x00036e78, 'libservice_manager_mojom_blink.cr.so'), - (0x0c533000, 0x0c53669c, 0x0000369c, 'libservice_manager_mojom_constants.cr.so'), - (0x0c537000, 0x0c53e85c, 0x0000785c, 'libservice_manager_mojom_constants_blink.cr.so'), - (0x0c53f000, 0x0c542668, 0x00003668, 'libservice_manager_mojom_constants_shared.cr.so'), - (0x0c543000, 0x0c54d700, 0x0000a700, 'libservice_manager_mojom_shared.cr.so'), - (0x0c54e000, 0x0c8fc6ec, 
0x003ae6ec, 'libsessions.cr.so'), - (0x0c8fd000, 0x0c90a924, 0x0000d924, 'libshared_memory_support.cr.so'), - (0x0c90b000, 0x0c9148ec, 0x000098ec, 'libshell_dialogs.cr.so'), - (0x0c915000, 0x0cf8de70, 0x00678e70, 'libskia.cr.so'), - (0x0cf8e000, 0x0cf978bc, 0x000098bc, 'libsnapshot.cr.so'), - (0x0cf98000, 0x0cfb7d9c, 0x0001fd9c, 'libsql.cr.so'), - (0x0cfb8000, 0x0cfbe744, 0x00006744, 'libstartup_tracing.cr.so'), - (0x0cfbf000, 0x0d19b4e4, 0x001dc4e4, 'libstorage_browser.cr.so'), - (0x0d19c000, 0x0d2a773c, 0x0010b73c, 'libstorage_common.cr.so'), - (0x0d2a8000, 0x0d2ac6fc, 0x000046fc, 'libsurface.cr.so'), - (0x0d2ad000, 0x0d2baa98, 0x0000da98, 'libtracing.cr.so'), - (0x0d2bb000, 0x0d2f36b0, 0x000386b0, 'libtracing_cpp.cr.so'), - (0x0d2f4000, 0x0d326e70, 0x00032e70, 'libtracing_mojom.cr.so'), - (0x0d327000, 0x0d33270c, 0x0000b70c, 'libtracing_mojom_shared.cr.so'), - (0x0d333000, 0x0d46d804, 0x0013a804, 'libui_android.cr.so'), - (0x0d46e000, 0x0d4cb3f8, 0x0005d3f8, 'libui_base.cr.so'), - (0x0d4cc000, 0x0d4dbc40, 0x0000fc40, 'libui_base_ime.cr.so'), - (0x0d4dc000, 0x0d4e58d4, 0x000098d4, 'libui_data_pack.cr.so'), - (0x0d4e6000, 0x0d51d1e0, 0x000371e0, 'libui_devtools.cr.so'), - (0x0d51e000, 0x0d52b984, 0x0000d984, 'libui_message_center_cpp.cr.so'), - (0x0d52c000, 0x0d539a48, 0x0000da48, 'libui_touch_selection.cr.so'), - (0x0d53a000, 0x0d55bc60, 0x00021c60, 'liburl.cr.so'), - (0x0d55c000, 0x0d55f6b4, 0x000036b4, 'liburl_ipc.cr.so'), - (0x0d560000, 0x0d5af110, 0x0004f110, 'liburl_matcher.cr.so'), - (0x0d5b0000, 0x0d5e2fac, 0x00032fac, 'libuser_manager.cr.so'), - (0x0d5e3000, 0x0d5e66e4, 0x000036e4, 'libuser_prefs.cr.so'), - (0x0d5e7000, 0x0e3e1cc8, 0x00dfacc8, 'libv8.cr.so'), - (0x0e3e2000, 0x0e400ae0, 0x0001eae0, 'libv8_libbase.cr.so'), - (0x0e401000, 0x0e4d91d4, 0x000d81d4, 'libviz_common.cr.so'), - (0x0e4da000, 0x0e4df7e4, 0x000057e4, 'libviz_resource_format.cr.so'), - (0x0e4e0000, 0x0e5b7120, 0x000d7120, 'libweb_dialogs.cr.so'), - (0x0e5b8000, 0x0e5c7a18, 0x0000fa18, 'libwebdata_common.cr.so'), - (0x0e5c8000, 0x0e61bfe4, 0x00053fe4, 'libwtf.cr.so'), -] - - -# A small memory map fragment extracted from a tombstone for a process that -# had loaded the APK corresponding to _TEST_APK_LIBS above. 
-_TEST_MEMORY_MAP = r'''memory map: -12c00000-12ccafff rw- 0 cb000 /dev/ashmem/dalvik-main space (deleted) -12ccb000-130cafff rw- cb000 400000 /dev/ashmem/dalvik-main space (deleted) -130cb000-32bfffff --- 4cb000 1fb35000 /dev/ashmem/dalvik-main space (deleted) -32c00000-32c00fff rw- 0 1000 /dev/ashmem/dalvik-main space 1 (deleted) -32c01000-52bfffff --- 1000 1ffff000 /dev/ashmem/dalvik-main space 1 (deleted) -6f3b8000-6fd90fff rw- 0 9d9000 /data/dalvik-cache/x86/system@framework@boot.art -6fd91000-71c42fff r-- 0 1eb2000 /data/dalvik-cache/x86/system@framework@boot.oat -71c43000-7393efff r-x 1eb2000 1cfc000 /data/dalvik-cache/x86/system@framework@boot.oat (load base 0x71c43000) -7393f000-7393ffff rw- 3bae000 1000 /data/dalvik-cache/x86/system@framework@boot.oat -73940000-73a1bfff rw- 0 dc000 /dev/ashmem/dalvik-zygote space (deleted) -73a1c000-73a1cfff rw- 0 1000 /dev/ashmem/dalvik-non moving space (deleted) -73a1d000-73a2dfff rw- 1000 11000 /dev/ashmem/dalvik-non moving space (deleted) -73a2e000-77540fff --- 12000 3b13000 /dev/ashmem/dalvik-non moving space (deleted) -77541000-7793ffff rw- 3b25000 3ff000 /dev/ashmem/dalvik-non moving space (deleted) -923aa000-92538fff r-- 8a9000 18f000 /data/app/com.example.app-2/base.apk -92539000-9255bfff r-- 0 23000 /data/data/com.example.app/app_data/paks/es.pak@162db1c6689 -9255c000-92593fff r-- 213000 38000 /data/app/com.example.app-2/base.apk -92594000-925c0fff r-- 87d000 2d000 /data/app/com.example.app-2/base.apk -925c1000-927d3fff r-- a37000 213000 /data/app/com.example.app-2/base.apk -927d4000-92e07fff r-- 24a000 634000 /data/app/com.example.app-2/base.apk -92e08000-92e37fff r-- a931000 30000 /data/app/com.example.app-2/base.apk -92e38000-92e86fff r-x a961000 4f000 /data/app/com.example.app-2/base.apk -92e87000-92e8afff rw- a9b0000 4000 /data/app/com.example.app-2/base.apk -92e8b000-92e8bfff rw- 0 1000 -92e8c000-92e9dfff r-- d5b0000 12000 /data/app/com.example.app-2/base.apk -92e9e000-92ebcfff r-x d5c2000 1f000 /data/app/com.example.app-2/base.apk -92ebd000-92ebefff rw- d5e1000 2000 /data/app/com.example.app-2/base.apk -92ebf000-92ebffff rw- 0 1000 -''' - -# list of (address, size, path, offset) tuples that must appear in -# _TEST_MEMORY_MAP. Not all sections need to be listed. 
-_TEST_MEMORY_MAP_SECTIONS = [ - (0x923aa000, 0x18f000, '/data/app/com.example.app-2/base.apk', 0x8a9000), - (0x9255c000, 0x038000, '/data/app/com.example.app-2/base.apk', 0x213000), - (0x92594000, 0x02d000, '/data/app/com.example.app-2/base.apk', 0x87d000), - (0x925c1000, 0x213000, '/data/app/com.example.app-2/base.apk', 0xa37000), -] - -_EXPECTED_TEST_MEMORY_MAP = r'''memory map: -12c00000-12ccafff rw- 0 cb000 /dev/ashmem/dalvik-main space (deleted) -12ccb000-130cafff rw- cb000 400000 /dev/ashmem/dalvik-main space (deleted) -130cb000-32bfffff --- 4cb000 1fb35000 /dev/ashmem/dalvik-main space (deleted) -32c00000-32c00fff rw- 0 1000 /dev/ashmem/dalvik-main space 1 (deleted) -32c01000-52bfffff --- 1000 1ffff000 /dev/ashmem/dalvik-main space 1 (deleted) -6f3b8000-6fd90fff rw- 0 9d9000 /data/dalvik-cache/x86/system@framework@boot.art -6fd91000-71c42fff r-- 0 1eb2000 /data/dalvik-cache/x86/system@framework@boot.oat -71c43000-7393efff r-x 1eb2000 1cfc000 /data/dalvik-cache/x86/system@framework@boot.oat (load base 0x71c43000) -7393f000-7393ffff rw- 3bae000 1000 /data/dalvik-cache/x86/system@framework@boot.oat -73940000-73a1bfff rw- 0 dc000 /dev/ashmem/dalvik-zygote space (deleted) -73a1c000-73a1cfff rw- 0 1000 /dev/ashmem/dalvik-non moving space (deleted) -73a1d000-73a2dfff rw- 1000 11000 /dev/ashmem/dalvik-non moving space (deleted) -73a2e000-77540fff --- 12000 3b13000 /dev/ashmem/dalvik-non moving space (deleted) -77541000-7793ffff rw- 3b25000 3ff000 /dev/ashmem/dalvik-non moving space (deleted) -923aa000-92538fff r-- 8a9000 18f000 /data/app/com.example.app-2/base.apk -92539000-9255bfff r-- 0 23000 /data/data/com.example.app/app_data/paks/es.pak@162db1c6689 -9255c000-92593fff r-- 213000 38000 /data/app/com.example.app-2/base.apk -92594000-925c0fff r-- 87d000 2d000 /data/app/com.example.app-2/base.apk -925c1000-927d3fff r-- a37000 213000 /data/app/com.example.app-2/base.apk -927d4000-92e07fff r-- 24a000 634000 /data/app/com.example.app-2/base.apk -92e08000-92e37fff r-- a931000 30000 /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x0) -92e38000-92e86fff r-x a961000 4f000 /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x30000) -92e87000-92e8afff rw- a9b0000 4000 /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x7f000) -92e8b000-92e8bfff rw- 0 1000 -92e8c000-92e9dfff r-- d5b0000 12000 /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x0) -92e9e000-92ebcfff r-x d5c2000 1f000 /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x12000) -92ebd000-92ebefff rw- d5e1000 2000 /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x31000) -92ebf000-92ebffff rw- 0 1000 -''' - -# Example stack section, taken from the same tombstone that _TEST_MEMORY_MAP -# was extracted from. -_TEST_STACK = r'''stack: - bf89a070 b7439468 /system/lib/libc.so - bf89a074 bf89a1e4 [stack] - bf89a078 932d4000 /data/app/com.example.app-2/base.apk - bf89a07c b73bfbc9 /system/lib/libc.so (pthread_mutex_lock+65) - bf89a080 00000000 - bf89a084 4000671c /dev/ashmem/dalvik-main space 1 (deleted) - bf89a088 932d1d86 /data/app/com.example.app-2/base.apk - bf89a08c b743671c /system/lib/libc.so - bf89a090 b77f8c00 /system/bin/linker - bf89a094 b743cc90 - bf89a098 932d1d4a /data/app/com.example.app-2/base.apk - bf89a09c b73bf271 /system/lib/libc.so (__pthread_internal_find(long)+65) - bf89a0a0 b743cc90 - bf89a0a4 bf89a0b0 [stack] - bf89a0a8 bf89a0b8 [stack] - bf89a0ac 00000008 - ........ ........ 
- #00 bf89a0b0 00000006 - bf89a0b4 00000002 - bf89a0b8 b743671c /system/lib/libc.so - bf89a0bc b73bf5d9 /system/lib/libc.so (pthread_kill+71) - #01 bf89a0c0 00006937 - bf89a0c4 00006937 - bf89a0c8 00000006 - bf89a0cc b77fd3a9 /system/bin/app_process32 (sigprocmask+141) - bf89a0d0 00000002 - bf89a0d4 bf89a0ec [stack] - bf89a0d8 00000000 - bf89a0dc b743671c /system/lib/libc.so - bf89a0e0 bf89a12c [stack] - bf89a0e4 bf89a1e4 [stack] - bf89a0e8 932d1d4a /data/app/com.example.app-2/base.apk - bf89a0ec b7365206 /system/lib/libc.so (raise+37) - #02 bf89a0f0 b77f8c00 /system/bin/linker - bf89a0f4 00000006 - bf89a0f8 b7439468 /system/lib/libc.so - bf89a0fc b743671c /system/lib/libc.so - bf89a100 bf89a12c [stack] - bf89a104 b743671c /system/lib/libc.so - bf89a108 bf89a12c [stack] - bf89a10c b735e9e5 /system/lib/libc.so (abort+81) - #03 bf89a110 00000006 - bf89a114 bf89a12c [stack] - bf89a118 00000000 - bf89a11c b55a3d3b /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::DefaultLogHandler(google::protobuf::LogLevel, char const*, int, std::__1::basic_string, std::__1::allocator > const&)+99) - bf89a120 b7439468 /system/lib/libc.so - bf89a124 b55ba38d /system/lib/libprotobuf-cpp-lite.so - bf89a128 b55ba408 /system/lib/libprotobuf-cpp-lite.so - bf89a12c ffffffdf - bf89a130 0000003d - bf89a134 adfedf00 [anon:libc_malloc] - bf89a138 bf89a158 [stack] - #04 bf89a13c a0cee7f0 /data/app/com.example.app-2/base.apk - bf89a140 b55c1cb0 /system/lib/libprotobuf-cpp-lite.so - bf89a144 bf89a1e4 [stack] -''' - -# Expected value of _TEST_STACK after translation of addresses in the APK -# into offsets into libraries. -_EXPECTED_STACK = r'''stack: - bf89a070 b7439468 /system/lib/libc.so - bf89a074 bf89a1e4 [stack] - bf89a078 932d4000 /data/app/com.example.app-2/base.apk - bf89a07c b73bfbc9 /system/lib/libc.so (pthread_mutex_lock+65) - bf89a080 00000000 - bf89a084 4000671c /dev/ashmem/dalvik-main space 1 (deleted) - bf89a088 932d1d86 /data/app/com.example.app-2/base.apk - bf89a08c b743671c /system/lib/libc.so - bf89a090 b77f8c00 /system/bin/linker - bf89a094 b743cc90 - bf89a098 932d1d4a /data/app/com.example.app-2/base.apk - bf89a09c b73bf271 /system/lib/libc.so (__pthread_internal_find(long)+65) - bf89a0a0 b743cc90 - bf89a0a4 bf89a0b0 [stack] - bf89a0a8 bf89a0b8 [stack] - bf89a0ac 00000008 - ........ ........ 
- #00 bf89a0b0 00000006 - bf89a0b4 00000002 - bf89a0b8 b743671c /system/lib/libc.so - bf89a0bc b73bf5d9 /system/lib/libc.so (pthread_kill+71) - #01 bf89a0c0 00006937 - bf89a0c4 00006937 - bf89a0c8 00000006 - bf89a0cc b77fd3a9 /system/bin/app_process32 (sigprocmask+141) - bf89a0d0 00000002 - bf89a0d4 bf89a0ec [stack] - bf89a0d8 00000000 - bf89a0dc b743671c /system/lib/libc.so - bf89a0e0 bf89a12c [stack] - bf89a0e4 bf89a1e4 [stack] - bf89a0e8 932d1d4a /data/app/com.example.app-2/base.apk - bf89a0ec b7365206 /system/lib/libc.so (raise+37) - #02 bf89a0f0 b77f8c00 /system/bin/linker - bf89a0f4 00000006 - bf89a0f8 b7439468 /system/lib/libc.so - bf89a0fc b743671c /system/lib/libc.so - bf89a100 bf89a12c [stack] - bf89a104 b743671c /system/lib/libc.so - bf89a108 bf89a12c [stack] - bf89a10c b735e9e5 /system/lib/libc.so (abort+81) - #03 bf89a110 00000006 - bf89a114 bf89a12c [stack] - bf89a118 00000000 - bf89a11c b55a3d3b /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::DefaultLogHandler(google::protobuf::LogLevel, char const*, int, std::__1::basic_string, std::__1::allocator > const&)+99) - bf89a120 b7439468 /system/lib/libc.so - bf89a124 b55ba38d /system/lib/libprotobuf-cpp-lite.so - bf89a128 b55ba408 /system/lib/libprotobuf-cpp-lite.so - bf89a12c ffffffdf - bf89a130 0000003d - bf89a134 adfedf00 [anon:libc_malloc] - bf89a138 bf89a158 [stack] - #04 bf89a13c a0cee7f0 /data/app/com.example.app-2/base.apk - bf89a140 b55c1cb0 /system/lib/libprotobuf-cpp-lite.so - bf89a144 bf89a1e4 [stack] -''' - -_TEST_BACKTRACE = r'''backtrace: - #00 pc 00084126 /system/lib/libc.so (tgkill+22) - #01 pc 000815d8 /system/lib/libc.so (pthread_kill+70) - #02 pc 00027205 /system/lib/libc.so (raise+36) - #03 pc 000209e4 /system/lib/libc.so (abort+80) - #04 pc 0000cf73 /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogMessage::Finish()+117) - #05 pc 0000cf8e /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogFinisher::operator=(google::protobuf::internal::LogMessage&)+26) - #06 pc 0000d27f /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::VerifyVersion(int, int, char const*)+574) - #07 pc 007cd236 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #08 pc 000111a9 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000) - #09 pc 00013228 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000) - #10 pc 000131de /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000) - #11 pc 007cd2d8 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #12 pc 007cd956 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #13 pc 007c2d4a /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #14 pc 009fc9f1 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #15 pc 009fc8ea /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #16 pc 00561c63 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #17 pc 0106fbdb /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #18 pc 004d7371 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #19 pc 004d8159 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #20 pc 004d7b96 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #21 pc 004da4b6 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #22 pc 005ab66c /data/app/com.google.android.apps.chrome-2/base.apk 
(offset 0x7daa000) - #23 pc 005afca2 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000) - #24 pc 0000cae8 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000) - #25 pc 00ce864f /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000) - #26 pc 00ce8dfa /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000) - #27 pc 00ce74c6 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000) - #28 pc 00004616 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x961e000) - #29 pc 00ce8215 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000) - #30 pc 0013d8c7 /system/lib/libart.so (art_quick_generic_jni_trampoline+71) - #31 pc 00137c52 /system/lib/libart.so (art_quick_invoke_static_stub+418) - #32 pc 00143651 /system/lib/libart.so (art::ArtMethod::Invoke(art::Thread*, unsigned int*, unsigned int, art::JValue*, char const*)+353) - #33 pc 005e06ae /system/lib/libart.so (artInterpreterToCompiledCodeBridge+190) - #34 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445) - #35 pc 0032cfc0 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160) - #36 pc 000fc703 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891) - #37 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188) - #38 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445) - #39 pc 0032cfc0 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160) - #40 pc 000fc703 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891) - #41 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188) - #42 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445) - #43 pc 0032ebf9 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)2, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+297) - #44 pc 000fc955 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+30485) - #45 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188) - #46 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445) - #47 pc 0033090c /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)4, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+636) - #48 pc 000fc67f /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29759) - #49 pc 00300700 /system/lib/libart.so 
(art::interpreter::EnterInterpreterFromEntryPoint(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame*)+128) - #50 pc 00667c73 /system/lib/libart.so (artQuickToInterpreterBridge+808) - #51 pc 0013d98d /system/lib/libart.so (art_quick_to_interpreter_bridge+77) - #52 pc 7264bc5b /data/dalvik-cache/x86/system@framework@boot.oat (offset 0x1eb2000) -''' - -_EXPECTED_BACKTRACE = r'''backtrace: - #00 pc 00084126 /system/lib/libc.so (tgkill+22) - #01 pc 000815d8 /system/lib/libc.so (pthread_kill+70) - #02 pc 00027205 /system/lib/libc.so (raise+36) - #03 pc 000209e4 /system/lib/libc.so (abort+80) - #04 pc 0000cf73 /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogMessage::Finish()+117) - #05 pc 0000cf8e /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogFinisher::operator=(google::protobuf::internal::LogMessage&)+26) - #06 pc 0000d27f /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::VerifyVersion(int, int, char const*)+574) - #07 pc 007cd236 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #08 pc 000111a9 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000) - #09 pc 00013228 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000) - #10 pc 000131de /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000) - #11 pc 007cd2d8 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #12 pc 007cd956 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #13 pc 007c2d4a /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #14 pc 009fc9f1 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #15 pc 009fc8ea /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #16 pc 00561c63 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #17 pc 0106fbdb /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #18 pc 004d7371 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #19 pc 004d8159 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #20 pc 004d7b96 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #21 pc 004da4b6 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #22 pc 005ab66c /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000) - #23 pc 005afca2 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000) - #24 pc 0000cae8 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000) - #25 pc 00ce864f /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000) - #26 pc 00ce8dfa /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000) - #27 pc 00ce74c6 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000) - #28 pc 00004616 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libembedder.cr.so (offset 0x28000) - #29 pc 00ce8215 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000) - #30 pc 0013d8c7 /system/lib/libart.so 
(art_quick_generic_jni_trampoline+71) - #31 pc 00137c52 /system/lib/libart.so (art_quick_invoke_static_stub+418) - #32 pc 00143651 /system/lib/libart.so (art::ArtMethod::Invoke(art::Thread*, unsigned int*, unsigned int, art::JValue*, char const*)+353) - #33 pc 005e06ae /system/lib/libart.so (artInterpreterToCompiledCodeBridge+190) - #34 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445) - #35 pc 0032cfc0 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160) - #36 pc 000fc703 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891) - #37 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188) - #38 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445) - #39 pc 0032cfc0 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160) - #40 pc 000fc703 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891) - #41 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188) - #42 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445) - #43 pc 0032ebf9 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)2, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+297) - #44 pc 000fc955 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+30485) - #45 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188) - #46 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445) - #47 pc 0033090c /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)4, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+636) - #48 pc 000fc67f /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29759) - #49 pc 00300700 /system/lib/libart.so (art::interpreter::EnterInterpreterFromEntryPoint(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame*)+128) - #50 pc 00667c73 /system/lib/libart.so (artQuickToInterpreterBridge+808) - #51 pc 0013d98d /system/lib/libart.so (art_quick_to_interpreter_bridge+77) - #52 pc 7264bc5b /data/dalvik-cache/x86/system@framework@boot.oat (offset 0x1eb2000) -''' - -_EXPECTED_BACKTRACE_OFFSETS_MAP = { - '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so': - set([ - 0x1c000 + 0x111a9, - 0x1c000 + 0x13228, - 0x1c000 + 0x131de, - ]), - - '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so': - set([ - 0x90e000 + 0x7cd236, - 0x90e000 + 0x7cd2d8, - 0x90e000 + 0x7cd956, - 0x90e000 + 
0x7c2d4a, - 0x90e000 + 0x9fc9f1, - 0x90e000 + 0x9fc8ea, - 0x90e000 + 0x561c63, - 0x90e000 + 0x106fbdb, - 0x90e000 + 0x4d7371, - 0x90e000 + 0x4d8159, - 0x90e000 + 0x4d7b96, - 0x90e000 + 0x4da4b6, - 0x90e000 + 0xcae8, - ]), - '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so': - set([ - 0xc2d000 + 0x5ab66c, - 0xc2d000 + 0x5afca2, - 0xc2d000 + 0xce864f, - 0xc2d000 + 0xce8dfa, - 0xc2d000 + 0xce74c6, - 0xc2d000 + 0xce8215, - ]), - '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libembedder.cr.so': - set([ - 0x28000 + 0x4616, - ]) -} - -# pylint: enable=line-too-long - -_ONE_MB = 1024 * 1024 -_TEST_SYMBOL_DATA = { - # Regular symbols - 0: 'mock_sym_for_addr_0 [mock_src/libmock1.so.c:0]', - 0x1000: 'mock_sym_for_addr_4096 [mock_src/libmock1.so.c:4096]', - - # Symbols without source file path. - _ONE_MB: 'mock_sym_for_addr_1048576 [??:0]', - _ONE_MB + 0x8234: 'mock_sym_for_addr_1081908 [??:0]', - - # Unknown symbol. - 2 * _ONE_MB: '?? [??:0]', - - # Inlined symbol. - 3 * _ONE_MB: - 'mock_sym_for_addr_3145728_inner [mock_src/libmock1.so.c:3145728]', -} - -@contextlib.contextmanager -def _TempDir(): - dirname = tempfile.mkdtemp() - try: - yield dirname - finally: - shutil.rmtree(dirname) - - -def _TouchFile(path): - # Create parent directories. - try: - os.makedirs(os.path.dirname(path)) - except OSError: - pass - with open(path, 'a'): - os.utime(path, None) - -class MockApkTranslator(object): - """A mock ApkLibraryPathTranslator object used for testing.""" - - # Regex that matches the content of APK native library map files generated - # with apk_lib_dump.py. - _RE_MAP_FILE = re.compile( - r'0x(?P[0-9a-f]+)\s+' + - r'0x(?P[0-9a-f]+)\s+' + - r'0x(?P[0-9a-f]+)\s+' + - r'0x(?P[0-9a-f]+)\s+') - - def __init__(self, test_apk_libs=None): - """Initialize instance. - - Args: - test_apk_libs: Optional list of (file_start, file_end, size, lib_path) - tuples, like _TEST_APK_LIBS for example. This will be used to - implement TranslatePath(). - """ - self._apk_libs = [] - if test_apk_libs: - self._AddLibEntries(test_apk_libs) - - def _AddLibEntries(self, entries): - self._apk_libs = sorted(self._apk_libs + entries, - lambda x, y: cmp(x[0], y[0])) - - def ReadMapFile(self, file_path): - """Read an .apk.native-libs file that was produced with apk_lib_dump.py. - - Args: - file_path: input path to .apk.native-libs file. 
Its format is - essentially: 0x 0x 0x - """ - new_libs = [] - with open(file_path) as f: - for line in f.readlines(): - m = MockApkTranslator._RE_MAP_FILE.match(line) - if m: - file_start = int(m.group('file_start'), 16) - file_end = int(m.group('file_end'), 16) - file_size = int(m.group('file_size'), 16) - lib_path = m.group('lib_path') - # Sanity check - if file_start + file_size != file_end: - logging.warning('%s: Inconsistent (start, end, size) values ' - '(0x%x, 0x%x, 0x%x)', - file_path, file_start, file_end, file_size) - else: - new_libs.append((file_start, file_end, file_size, lib_path)) - - self._AddLibEntries(new_libs) - - def TranslatePath(self, lib_path, lib_offset): - """Translate an APK file path + offset into a library path + offset.""" - min_pos = 0 - max_pos = len(self._apk_libs) - while min_pos < max_pos: - mid_pos = (min_pos + max_pos) / 2 - mid_entry = self._apk_libs[mid_pos] - mid_offset = mid_entry[0] - mid_size = mid_entry[2] - if lib_offset < mid_offset: - max_pos = mid_pos - elif lib_offset >= mid_offset + mid_size: - min_pos = mid_pos + 1 - else: - # Found it - new_path = '%s!lib/%s' % (lib_path, mid_entry[3]) - new_offset = lib_offset - mid_offset - return (new_path, new_offset) - - return lib_path, lib_offset - - -class HostLibraryFinderTest(unittest.TestCase): - - def testEmpty(self): - finder = symbol_utils.HostLibraryFinder() - self.assertIsNone(finder.Find('/data/data/com.example.app-1/lib/libfoo.so')) - self.assertIsNone( - finder.Find('/data/data/com.example.app-1/base.apk!lib/libfoo.so')) - - - def testSimpleDirectory(self): - finder = symbol_utils.HostLibraryFinder() - with _TempDir() as tmp_dir: - host_libfoo_path = os.path.join(tmp_dir, 'libfoo.so') - host_libbar_path = os.path.join(tmp_dir, 'libbar.so') - _TouchFile(host_libfoo_path) - _TouchFile(host_libbar_path) - - finder.AddSearchDir(tmp_dir) - - # Regular library path (extracted at installation by the PackageManager). - # Note that the extraction path has changed between Android releases, - # i.e. it can be /data/app/, /data/data/ or /data/app-lib/ depending - # on the system. - self.assertEqual( - host_libfoo_path, - finder.Find('/data/app-lib/com.example.app-1/lib/libfoo.so')) - - # Verify that the path doesn't really matter - self.assertEqual( - host_libfoo_path, - finder.Find('/whatever/what.apk!lib/libfoo.so')) - - self.assertEqual( - host_libbar_path, - finder.Find('/data/data/com.example.app-1/lib/libbar.so')) - - self.assertIsNone( - finder.Find('/data/data/com.example.app-1/lib/libunknown.so')) - - - def testMultipleDirectories(self): - with _TempDir() as tmp_dir: - # Create the following files: - # /aaa/ - # libfoo.so - # /bbb/ - # libbar.so - # libfoo.so (this one should never be seen because 'aaa' - # shall be first in the search path list). 
- # - aaa_dir = os.path.join(tmp_dir, 'aaa') - bbb_dir = os.path.join(tmp_dir, 'bbb') - os.makedirs(aaa_dir) - os.makedirs(bbb_dir) - - host_libfoo_path = os.path.join(aaa_dir, 'libfoo.so') - host_libbar_path = os.path.join(bbb_dir, 'libbar.so') - host_libfoo2_path = os.path.join(bbb_dir, 'libfoo.so') - - _TouchFile(host_libfoo_path) - _TouchFile(host_libbar_path) - _TouchFile(host_libfoo2_path) - - finder = symbol_utils.HostLibraryFinder() - finder.AddSearchDir(aaa_dir) - finder.AddSearchDir(bbb_dir) - - self.assertEqual( - host_libfoo_path, - finder.Find('/data/data/com.example.app-1/lib/libfoo.so')) - - self.assertEqual( - host_libfoo_path, - finder.Find('/data/whatever/base.apk!lib/libfoo.so')) - - self.assertEqual( - host_libbar_path, - finder.Find('/data/data/com.example.app-1/lib/libbar.so')) - - self.assertIsNone( - finder.Find('/data/data/com.example.app-1/lib/libunknown.so')) - - -class ElfSymbolResolverTest(unittest.TestCase): - - def testCreation(self): - resolver = symbol_utils.ElfSymbolResolver( - addr2line_path_for_tests=_MOCK_A2L_PATH) - self.assertTrue(resolver) - - def testWithSimpleOffsets(self): - resolver = symbol_utils.ElfSymbolResolver( - addr2line_path_for_tests=_MOCK_A2L_PATH) - resolver.SetAndroidAbi('ignored-abi') - - for addr, expected_sym in _TEST_SYMBOL_DATA.iteritems(): - self.assertEqual(resolver.FindSymbolInfo('/some/path/libmock1.so', addr), - expected_sym) - - def testWithPreResolvedSymbols(self): - resolver = symbol_utils.ElfSymbolResolver( - addr2line_path_for_tests=_MOCK_A2L_PATH) - resolver.SetAndroidAbi('ignored-abi') - resolver.AddLibraryOffsets('/some/path/libmock1.so', - _TEST_SYMBOL_DATA.keys()) - - resolver.DisallowSymbolizerForTesting() - - for addr, expected_sym in _TEST_SYMBOL_DATA.iteritems(): - sym_info = resolver.FindSymbolInfo('/some/path/libmock1.so', addr) - self.assertIsNotNone(sym_info, 'None symbol info for addr %x' % addr) - self.assertEqual( - sym_info, expected_sym, - 'Invalid symbol info for addr %x [%s] expected [%s]' % ( - addr, sym_info, expected_sym)) - - -class MemoryMapTest(unittest.TestCase): - - def testCreation(self): - mem_map = symbol_utils.MemoryMap('test-abi32') - self.assertIsNone(mem_map.FindSectionForAddress(0)) - - def testParseLines(self): - mem_map = symbol_utils.MemoryMap('test-abi32') - mem_map.ParseLines(_TEST_MEMORY_MAP.splitlines()) - for exp_addr, exp_size, exp_path, exp_offset in _TEST_MEMORY_MAP_SECTIONS: - text = '(addr:%x, size:%x, path:%s, offset=%x)' % ( - exp_addr, exp_size, exp_path, exp_offset) - - t = mem_map.FindSectionForAddress(exp_addr) - self.assertTrue(t, 'Could not find %s' % text) - self.assertEqual(t.address, exp_addr) - self.assertEqual(t.size, exp_size) - self.assertEqual(t.offset, exp_offset) - self.assertEqual(t.path, exp_path) - - def testTranslateLine(self): - android_abi = 'test-abi' - apk_translator = MockApkTranslator(_TEST_APK_LIBS) - mem_map = symbol_utils.MemoryMap(android_abi) - for line, expected_line in zip(_TEST_MEMORY_MAP.splitlines(), - _EXPECTED_TEST_MEMORY_MAP.splitlines()): - self.assertEqual(mem_map.TranslateLine(line, apk_translator), - expected_line) - -class StackTranslatorTest(unittest.TestCase): - - def testSimpleStack(self): - android_abi = 'test-abi32' - mem_map = symbol_utils.MemoryMap(android_abi) - mem_map.ParseLines(_TEST_MEMORY_MAP) - apk_translator = MockApkTranslator(_TEST_APK_LIBS) - stack_translator = symbol_utils.StackTranslator(android_abi, mem_map, - apk_translator) - input_stack = _TEST_STACK.splitlines() - expected_stack = 
_EXPECTED_STACK.splitlines() - self.assertEqual(len(input_stack), len(expected_stack)) - for stack_line, expected_line in zip(input_stack, expected_stack): - new_line = stack_translator.TranslateLine(stack_line) - self.assertEqual(new_line, expected_line) - - -class MockSymbolResolver(symbol_utils.SymbolResolver): - - # A regex matching a symbol definition as it appears in a test symbol file. - # Format is: <offset> <symbol> - _RE_SYMBOL_DEFINITION = re.compile( - r'(?P<offset>[0-9a-f]+)\s+(?P<symbol>.*)') - - def __init__(self): - super(MockSymbolResolver, self).__init__() - self._map = collections.defaultdict(dict) - - def AddTestLibrarySymbols(self, lib_name, offsets_map): - """Add a new test entry for a given library name. - - Args: - lib_name: Library name (e.g. 'libfoo.so') - offsets_map: A mapping from offsets to symbol info strings. - """ - self._map[lib_name] = offsets_map - - def ReadTestFile(self, file_path, lib_name): - """Read a single test symbol file, matching a given library. - - Args: - file_path: Input file path. - lib_name: Library name these symbols correspond to (e.g. 'libfoo.so') - """ - with open(file_path) as f: - for line in f.readlines(): - line = line.rstrip() - m = MockSymbolResolver._RE_SYMBOL_DEFINITION.match(line) - if m: - offset = int(m.group('offset')) - symbol = m.group('symbol') - self._map[lib_name][offset] = symbol - - def ReadTestFilesInDir(self, dir_path, file_suffix): - """Read all symbol test files in a given directory. - - Args: - dir_path: Directory path. - file_suffix: File suffix used to detect test symbol files. - """ - for filename in os.listdir(dir_path): - if filename.endswith(file_suffix): - lib_name = filename[:-len(file_suffix)] - self.ReadTestFile(os.path.join(dir_path, filename), lib_name) - - def FindSymbolInfo(self, device_path, device_offset): - """Implement SymbolResolver.FindSymbolInfo.""" - lib_name = os.path.basename(device_path) - offsets = self._map.get(lib_name) - if not offsets: - return None - - return offsets.get(device_offset) - - -class BacktraceTranslatorTest(unittest.TestCase): - - def testEmpty(self): - android_abi = 'test-abi' - apk_translator = MockApkTranslator() - backtrace_translator = symbol_utils.BacktraceTranslator(android_abi, - apk_translator) - self.assertTrue(backtrace_translator) - - def testFindLibraryOffsets(self): - android_abi = 'test-abi' - apk_translator = MockApkTranslator(_TEST_APK_LIBS) - backtrace_translator = symbol_utils.BacktraceTranslator(android_abi, - apk_translator) - input_backtrace = _EXPECTED_BACKTRACE.splitlines() - expected_lib_offsets_map = _EXPECTED_BACKTRACE_OFFSETS_MAP - offset_map = backtrace_translator.FindLibraryOffsets(input_backtrace) - for lib_path, offsets in offset_map.iteritems(): - self.assertTrue(lib_path in expected_lib_offsets_map, - '%s is not in expected library-offsets map!'
% lib_path) - sorted_offsets = sorted(offsets) - sorted_expected_offsets = sorted(expected_lib_offsets_map[lib_path]) - self.assertEqual(sorted_offsets, sorted_expected_offsets, - '%s has invalid offsets %s expected %s' % ( - lib_path, sorted_offsets, sorted_expected_offsets)) - - def testTranslateLine(self): - android_abi = 'test-abi' - apk_translator = MockApkTranslator(_TEST_APK_LIBS) - backtrace_translator = symbol_utils.BacktraceTranslator(android_abi, - apk_translator) - input_backtrace = _TEST_BACKTRACE.splitlines() - expected_backtrace = _EXPECTED_BACKTRACE.splitlines() - self.assertEqual(len(input_backtrace), len(expected_backtrace)) - for trace_line, expected_line in zip(input_backtrace, expected_backtrace): - line = backtrace_translator.TranslateLine(trace_line, - MockSymbolResolver()) - self.assertEqual(line, expected_line) - - -if __name__ == '__main__': - unittest.main() diff --git a/build/android/pylib/utils/app_bundle_utils.py b/build/android/pylib/utils/app_bundle_utils.py index b2e99273f136..9a52d852358c 100644 --- a/build/android/pylib/utils/app_bundle_utils.py +++ b/build/android/pylib/utils/app_bundle_utils.py @@ -1,13 +1,15 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import json import logging import os +import pathlib import re +import shutil import sys -import tempfile +import zipfile sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp')) @@ -16,21 +18,24 @@ from util import resource_utils import bundletool -# List of valid modes for GenerateBundleApks() -BUILD_APKS_MODES = ('default', 'universal', 'system', 'system_compressed') +# "system_apks" is "default", but with locale list and compressed dex. +_SYSTEM_MODES = ('system', 'system_apks') +BUILD_APKS_MODES = _SYSTEM_MODES + ('default', 'universal') OPTIMIZE_FOR_OPTIONS = ('ABI', 'SCREEN_DENSITY', 'LANGUAGE', 'TEXTURE_COMPRESSION_FORMAT') -_SYSTEM_MODES = ('system_compressed', 'system') _ALL_ABIS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'] +def _BundleMinSdkVersion(bundle_path): + manifest_data = bundletool.RunBundleTool( + ['dump', 'manifest', '--bundle', bundle_path]) + return int(re.search(r'minSdkVersion.*?(\d+)', manifest_data).group(1)) + + def _CreateDeviceSpec(bundle_path, sdk_version, locales): if not sdk_version: - manifest_data = bundletool.RunBundleTool( - ['dump', 'manifest', '--bundle', bundle_path]) - sdk_version = int( - re.search(r'minSdkVersion.*?(\d+)', manifest_data).group(1)) + sdk_version = _BundleMinSdkVersion(bundle_path) # Setting sdkVersion=minSdkVersion prevents multiple per-minSdkVersion .apk # files from being created within the .apks file. @@ -42,6 +47,20 @@ def _CreateDeviceSpec(bundle_path, sdk_version, locales): } +def _FixBundleDexCompressionGlob(src_bundle, dst_bundle): + # Modifies the BundleConfig.pb of the given .aab to add "classes*.dex" to the + # "uncompressedGlob" list. + with zipfile.ZipFile(src_bundle) as src, \ + zipfile.ZipFile(dst_bundle, 'w') as dst: + for info in src.infolist(): + data = src.read(info) + if info.filename == 'BundleConfig.pb': + # A classesX.dex entry is added by create_app_bundle.py so that we can + # modify it here in order to have it take effect. 
b/176198991 + data = data.replace(b'classesX.dex', b'classes*.dex') + dst.writestr(info, data) + + def GenerateBundleApks(bundle_path, bundle_apks_path, aapt2_path, @@ -49,6 +68,7 @@ def GenerateBundleApks(bundle_path, keystore_password, keystore_alias, mode=None, + local_testing=False, minimal=False, minimal_sdk_version=None, check_for_noop=True, @@ -97,23 +117,38 @@ def GenerateBundleApks(bundle_path, def rebuild(): logging.info('Building %s', bundle_apks_path) - with tempfile.NamedTemporaryFile(suffix='.apks') as tmp_apks_file: + with build_utils.TempDir() as tmp_dir: + tmp_apks_file = os.path.join(tmp_dir, 'output.apks') cmd_args = [ 'build-apks', '--aapt2=%s' % aapt2_path, - '--output=%s' % tmp_apks_file.name, - '--bundle=%s' % bundle_path, + '--output=%s' % tmp_apks_file, '--ks=%s' % keystore_path, '--ks-pass=pass:%s' % keystore_password, '--ks-key-alias=%s' % keystore_alias, '--overwrite', ] + input_bundle_path = bundle_path + # Work around bundletool not respecting uncompressDexFiles setting. + # b/176198991 + if mode not in _SYSTEM_MODES and _BundleMinSdkVersion(bundle_path) >= 27: + input_bundle_path = os.path.join(tmp_dir, 'system.aab') + _FixBundleDexCompressionGlob(bundle_path, input_bundle_path) + + cmd_args += ['--bundle=%s' % input_bundle_path] + + if local_testing: + cmd_args += ['--local-testing'] if mode is not None: if mode not in BUILD_APKS_MODES: raise Exception('Invalid mode parameter %s (should be in %s)' % (mode, BUILD_APKS_MODES)) - cmd_args += ['--mode=' + mode] + if mode != 'system_apks': + cmd_args += ['--mode=' + mode] + else: + # Specify --optimize-for to prevent language splits being created. + cmd_args += ['--optimize-for=device_tier'] if optimize_for: if optimize_for not in OPTIMIZE_FOR_OPTIONS: @@ -122,32 +157,27 @@ def rebuild(): (mode, OPTIMIZE_FOR_OPTIONS)) cmd_args += ['--optimize-for=' + optimize_for] - with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as spec_file: - if device_spec: - json.dump(device_spec, spec_file) - spec_file.flush() - cmd_args += ['--device-spec=' + spec_file.name] - bundletool.RunBundleTool(cmd_args) + if device_spec: + data = json.dumps(device_spec) + logging.debug('Device Spec: %s', data) + spec_file = pathlib.Path(tmp_dir) / 'device.json' + spec_file.write_text(data) + cmd_args += ['--device-spec=' + str(spec_file)] + + bundletool.RunBundleTool(cmd_args) - # Make the resulting .apks file hermetic. - with build_utils.TempDir() as temp_dir, \ - build_utils.AtomicOutput(bundle_apks_path, only_if_changed=False) as f: - files = build_utils.ExtractAll(tmp_apks_file.name, temp_dir) - build_utils.DoZip(files, f, base_dir=temp_dir) + shutil.move(tmp_apks_file, bundle_apks_path) if check_for_noop: - # NOTE: BUNDLETOOL_JAR_PATH is added to input_strings, rather than - # input_paths, to speed up MD5 computations by about 400ms (the .jar file - # contains thousands of class files which are checked independently, - # resulting in an .md5.stamp of more than 60000 lines!). - input_paths = [bundle_path, aapt2_path, keystore_path] + input_paths = [ + bundle_path, + bundletool.BUNDLETOOL_JAR_PATH, + aapt2_path, + keystore_path, + ] input_strings = [ keystore_password, keystore_alias, - bundletool.BUNDLETOOL_JAR_PATH, - # NOTE: BUNDLETOOL_VERSION is already part of BUNDLETOOL_JAR_PATH, but - # it's simpler to assume that this may not be the case in the future. 
- bundletool.BUNDLETOOL_VERSION, device_spec, ] if mode is not None: diff --git a/build/android/pylib/utils/argparse_utils.py b/build/android/pylib/utils/argparse_utils.py index 06544a2b0e5b..698be786310e 100644 --- a/build/android/pylib/utils/argparse_utils.py +++ b/build/android/pylib/utils/argparse_utils.py @@ -1,8 +1,8 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function + import argparse @@ -23,7 +23,7 @@ class CustomHelpAction(argparse.Action): help='What this helps with') ''' # Derived from argparse._HelpAction from - # https://github.com/python/cpython/blob/master/Lib/argparse.py + # https://github.com/python/cpython/blob/main/Lib/argparse.py # pylint: disable=redefined-builtin # (complains about 'help' being redefined) @@ -33,11 +33,11 @@ def __init__(self, default=argparse.SUPPRESS, custom_help_text=None, help=None): - super(CustomHelpAction, self).__init__(option_strings=option_strings, - dest=dest, - default=default, - nargs=0, - help=help) + super().__init__(option_strings=option_strings, + dest=dest, + default=default, + nargs=0, + help=help) if not custom_help_text: raise ValueError('custom_help_text is required') diff --git a/build/android/pylib/utils/chrome_proxy_utils.py b/build/android/pylib/utils/chrome_proxy_utils.py index 149d0b9c8c5b..14960f41f797 100644 --- a/build/android/pylib/utils/chrome_proxy_utils.py +++ b/build/android/pylib/utils/chrome_proxy_utils.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Utilities for setting up and tear down WPR and TsProxy service.""" @@ -18,7 +18,7 @@ DEFAULT_UPLOAD_BANDWIDTH_KBPS = 72000 -class WPRServer(object): +class WPRServer: """Utils to set up a webpagereplay_go_server instance.""" def __init__(self): @@ -88,7 +88,7 @@ def archive_path(self): return self._archive_path -class ChromeProxySession(object): +class ChromeProxySession: """Utils to help set up a Chrome Proxy.""" def __init__(self, device_proxy_port=DEFAULT_DEVICE_PORT): diff --git a/build/android/pylib/utils/chrome_proxy_utils_test.py b/build/android/pylib/utils/chrome_proxy_utils_test.py index b38b268fe8a3..2b8981204333 100755 --- a/build/android/pylib/utils/chrome_proxy_utils_test.py +++ b/build/android/pylib/utils/chrome_proxy_utils_test.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2020 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
"""Tests for chrome_proxy_utils.""" @@ -91,7 +91,7 @@ def test_Stop_WithProperties(self, ts_proxy_mock, wpr_mock, forwarder_mock): wpr_mock.assert_called_once_with() ts_proxy_mock.assert_called_once_with() self.assertFalse(chrome_proxy.wpr_replay_mode) - self.assertEquals(chrome_proxy.wpr_archive_path, os.path.abspath(__file__)) + self.assertEqual(chrome_proxy.wpr_archive_path, os.path.abspath(__file__)) def test_SetWPRRecordMode(self): chrome_proxy = chrome_proxy_utils.ChromeProxySession(4) @@ -108,7 +108,7 @@ def test_SetWPRRecordMode(self): def test_SetWPRArchivePath(self): chrome_proxy = chrome_proxy_utils.ChromeProxySession(4) chrome_proxy._wpr_server._archive_path = 'abc' - self.assertEquals(chrome_proxy.wpr_archive_path, 'abc') + self.assertEqual(chrome_proxy.wpr_archive_path, 'abc') def test_UseDefaultDeviceProxyPort(self): chrome_proxy = chrome_proxy_utils.ChromeProxySession() @@ -117,7 +117,7 @@ def test_UseDefaultDeviceProxyPort(self): 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=', '--proxy-server=socks5://localhost:1080' ] - self.assertEquals(chrome_proxy.device_proxy_port, 1080) + self.assertEqual(chrome_proxy.device_proxy_port, 1080) self.assertListEqual(chrome_proxy.GetFlags(), expected_flags) def test_UseNewDeviceProxyPort(self): @@ -127,7 +127,7 @@ def test_UseNewDeviceProxyPort(self): 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=', '--proxy-server=socks5://localhost:1' ] - self.assertEquals(chrome_proxy.device_proxy_port, 1) + self.assertEqual(chrome_proxy.device_proxy_port, 1) self.assertListEqual(chrome_proxy.GetFlags(), expected_flags) diff --git a/build/android/pylib/utils/decorators.py b/build/android/pylib/utils/decorators.py index 8eec1d1e58e6..0cef420b3764 100644 --- a/build/android/pylib/utils/decorators.py +++ b/build/android/pylib/utils/decorators.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/pylib/utils/decorators_test.py b/build/android/pylib/utils/decorators_test.py index 73a9f0de6697..f8d9075916ab 100755 --- a/build/android/pylib/utils/decorators_test.py +++ b/build/android/pylib/utils/decorators_test.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2017 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -35,8 +35,8 @@ def raiseException(): def doesNotRaiseException(): return 999 - self.assertEquals(raiseException(), 111) - self.assertEquals(doesNotRaiseException(), 999) + self.assertEqual(raiseException(), 111) + self.assertEqual(doesNotRaiseException(), 999) class MemoizeDecoratorTest(unittest.TestCase): @@ -79,13 +79,13 @@ def notMemoized(): return notMemoized.count notMemoized.count = 0 - self.assertEquals(memoized(), 1) - self.assertEquals(memoized(), 1) - self.assertEquals(memoized(), 1) + self.assertEqual(memoized(), 1) + self.assertEqual(memoized(), 1) + self.assertEqual(memoized(), 1) - self.assertEquals(notMemoized(), 1) - self.assertEquals(notMemoized(), 2) - self.assertEquals(notMemoized(), 3) + self.assertEqual(notMemoized(), 1) + self.assertEqual(notMemoized(), 2) + self.assertEqual(notMemoized(), 3) def testFunctionMemoizedBasedOnArgs(self): """Tests that |Memoize| caches results based on args and kwargs.""" @@ -94,10 +94,10 @@ def testFunctionMemoizedBasedOnArgs(self): def returnValueBasedOnArgsKwargs(a, k=0): return a + k - self.assertEquals(returnValueBasedOnArgsKwargs(1, 1), 2) - self.assertEquals(returnValueBasedOnArgsKwargs(1, 2), 3) - self.assertEquals(returnValueBasedOnArgsKwargs(2, 1), 3) - self.assertEquals(returnValueBasedOnArgsKwargs(3, 3), 6) + self.assertEqual(returnValueBasedOnArgsKwargs(1, 1), 2) + self.assertEqual(returnValueBasedOnArgsKwargs(1, 2), 3) + self.assertEqual(returnValueBasedOnArgsKwargs(2, 1), 3) + self.assertEqual(returnValueBasedOnArgsKwargs(3, 3), 6) if __name__ == '__main__': diff --git a/build/android/pylib/utils/device_dependencies.py b/build/android/pylib/utils/device_dependencies.py index 9cb5bd892a88..5f3f1edb6f02 100644 --- a/build/android/pylib/utils/device_dependencies.py +++ b/build/android/pylib/utils/device_dependencies.py @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,16 +7,19 @@ from pylib import constants - _EXCLUSIONS = [ - re.compile(r'.*OWNERS'), # Should never be included. + # Misc files that exist to document directories + re.compile(r'.*METADATA'), + re.compile(r'.*OWNERS'), + re.compile(r'.*\.md'), re.compile(r'.*\.crx'), # Chrome extension zip files. - re.compile(os.path.join('.*', - r'\.git.*')), # Any '.git*' directories/files. + re.compile(r'.*/\.git.*'), # Any '.git*' directories/files. re.compile(r'.*\.so'), # Libraries packed into .apk. re.compile(r'.*Mojo.*manifest\.json'), # Some source_set()s pull these in. re.compile(r'.*\.py'), # Some test_support targets include python deps. re.compile(r'.*\.apk'), # Should be installed separately. + re.compile(r'.*\.jar'), # Never need java intermediates. + re.compile(r'.*\.crx'), # Used by download_from_google_storage. re.compile(r'.*lib.java/.*'), # Never need java intermediates. # Test filter files: @@ -30,21 +33,27 @@ # v8's blobs and icu data get packaged into APKs. 
re.compile(r'.*snapshot_blob.*\.bin'), - re.compile(r'.*icudtl.bin'), + re.compile(r'.*icudtl\.bin'), # Scripts that are needed by swarming, but not on devices: re.compile(r'.*llvm-symbolizer'), - re.compile(r'.*md5sum_bin'), - re.compile(os.path.join('.*', 'development', 'scripts', 'stack')), + re.compile(r'.*md5sum_(?:bin|dist)'), + re.compile(r'.*/development/scripts/stack'), + re.compile(r'.*/build/android/pylib/symbols'), + re.compile(r'.*/build/android/stacktrace'), # Required for java deobfuscation on the host: re.compile(r'.*build/android/stacktrace/.*'), re.compile(r'.*third_party/jdk/.*'), re.compile(r'.*third_party/proguard/.*'), + # Our tests don't need these. + re.compile(r'.*/devtools-frontend/src/front_end/.*'), + # Build artifacts: re.compile(r'.*\.stamp'), - re.compile(r'.*.pak\.info'), + re.compile(r'.*\.pak\.info'), + re.compile(r'.*\.build_config.json'), re.compile(r'.*\.incremental\.json'), ] diff --git a/build/android/pylib/utils/device_dependencies_test.py b/build/android/pylib/utils/device_dependencies_test.py index b2da5a7ee580..2ff937ee6c5c 100755 --- a/build/android/pylib/utils/device_dependencies_test.py +++ b/build/android/pylib/utils/device_dependencies_test.py @@ -1,5 +1,5 @@ -#! /usr/bin/env vpython -# Copyright 2016 The Chromium Authors. All rights reserved. +#! /usr/bin/env vpython3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -16,40 +16,36 @@ def testCheckedInFile(self): test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'foo', 'bar', 'baz.txt') output_directory = os.path.join( constants.DIR_SOURCE_ROOT, 'out-foo', 'Release') - self.assertEquals( - [None, 'foo', 'bar', 'baz.txt'], - device_dependencies.DevicePathComponentsFor( - test_path, output_directory)) + self.assertEqual([None, 'foo', 'bar', 'baz.txt'], + device_dependencies.DevicePathComponentsFor( + test_path, output_directory)) def testOutputDirectoryFile(self): test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release', 'icudtl.dat') output_directory = os.path.join( constants.DIR_SOURCE_ROOT, 'out-foo', 'Release') - self.assertEquals( - [None, 'icudtl.dat'], - device_dependencies.DevicePathComponentsFor( - test_path, output_directory)) + self.assertEqual([None, 'icudtl.dat'], + device_dependencies.DevicePathComponentsFor( + test_path, output_directory)) def testOutputDirectorySubdirFile(self): test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release', 'test_dir', 'icudtl.dat') output_directory = os.path.join( constants.DIR_SOURCE_ROOT, 'out-foo', 'Release') - self.assertEquals( - [None, 'test_dir', 'icudtl.dat'], - device_dependencies.DevicePathComponentsFor( - test_path, output_directory)) + self.assertEqual([None, 'test_dir', 'icudtl.dat'], + device_dependencies.DevicePathComponentsFor( + test_path, output_directory)) def testOutputDirectoryPakFile(self): test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release', 'foo.pak') output_directory = os.path.join( constants.DIR_SOURCE_ROOT, 'out-foo', 'Release') - self.assertEquals( - [None, 'paks', 'foo.pak'], - device_dependencies.DevicePathComponentsFor( - test_path, output_directory)) + self.assertEqual([None, 'paks', 'foo.pak'], + device_dependencies.DevicePathComponentsFor( + test_path, output_directory)) if __name__ == '__main__': diff --git a/build/android/pylib/utils/dexdump.py b/build/android/pylib/utils/dexdump.py index f81ac603d432..0913aad47129 100644 --- a/build/android/pylib/utils/dexdump.py +++ 
b/build/android/pylib/utils/dexdump.py @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,6 +8,8 @@ import sys import tempfile from xml.etree import ElementTree +from collections import namedtuple +from typing import Dict from devil.utils import cmd_helper from pylib import constants @@ -18,6 +20,27 @@ DEXDUMP_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'dexdump') +# Annotations dict format: +# { +# 'empty-annotation-class-name': None, +# 'annotation-class-name': { +# 'fieldA': 'primitive-value', +# 'fieldB': [ 'array-item-1', 'array-item-2', ... ], +# 'fieldC': { # CURRENTLY UNSUPPORTED. +# /* Object value */ +# 'field': 'primitive-value', +# 'field': [ 'array-item-1', 'array-item-2', ... ], +# 'field': { /* Object value */ } +# } +# } +# } +Annotations = namedtuple('Annotations', + ['classAnnotations', 'methodsAnnotations']) + +# Finds each space-separated "foo=..." (where ... can contain spaces). +_ANNOTATION_VALUE_MATCHER = re.compile(r'\w+=.*?(?:$|(?= \w+=))') + + def Dump(apk_path): """Dumps class and method information from an APK into a dict via dexdump. @@ -29,7 +52,10 @@ def Dump(apk_path): <package_name>: { 'classes': { <class_name>: { - 'methods': [<method_1>, <method_2>] + 'methods': [<method_1>, <method_2>], + 'superclass': <superclass_name>, + 'is_abstract': <is_abstract>, + 'annotations': <annotations> } } } @@ -42,7 +68,7 @@ def Dump(apk_path): dexfile_dir, pattern='*classes*.dex'): output_xml = cmd_helper.GetCmdOutput( - [DEXDUMP_PATH, '-l', 'xml', dex_file]) + [DEXDUMP_PATH, '-a', '-j', '-l', 'xml', dex_file]) # Dexdump doesn't escape its XML output very well; decode it as utf-8 with # invalid sequences replaced, then remove forbidden characters and # re-encode it (as etree expects a byte string as input so it can figure @@ -50,20 +76,142 @@ def Dump(apk_path): BAD_XML_CHARS = re.compile( u'[\x00-\x08\x0b-\x0c\x0e-\x1f\x7f-\x84\x86-\x9f' + u'\ud800-\udfff\ufdd0-\ufddf\ufffe-\uffff]') - if sys.version_info[0] < 3: - decoded_xml = output_xml.decode('utf-8', 'replace') - clean_xml = BAD_XML_CHARS.sub(u'\ufffd', decoded_xml) - else: - # Line duplicated to avoid pylint redefined-variable-type error. - clean_xml = BAD_XML_CHARS.sub(u'\ufffd', output_xml) + + # Line duplicated to avoid pylint redefined-variable-type error. + clean_xml = BAD_XML_CHARS.sub(u'\ufffd', output_xml) + + # Constructors are referenced as "<init>" in our annotations, + # which will result in the ElementTree failing to parse + # our xml as it won't find a closing tag for <init> + clean_xml = clean_xml.replace('<init>', 'constructor') + + annotations = _ParseAnnotations(clean_xml) + parsed_dex_files.append( - _ParseRootNode(ElementTree.fromstring(clean_xml.encode('utf-8')))) + _ParseRootNode(ElementTree.fromstring(clean_xml.encode('utf-8')), + annotations)) return parsed_dex_files finally: shutil.rmtree(dexfile_dir) -def _ParseRootNode(root): +def _ParseAnnotationValues(values_str): + if not values_str: + return None + ret = {} + for key_value in _ANNOTATION_VALUE_MATCHER.findall(values_str): + key, value_str = key_value.split('=', 1) + # TODO: support for dicts if ever needed. + if value_str.startswith('{ ') and value_str.endswith(' }'): + value = value_str[2:-2].split() + else: + value = value_str + ret[key] = value + return ret + + +def _ParseAnnotations(dexRaw: str) -> Dict[int, Annotations]: + """ Parse XML strings and return a dict of Annotations mapped to + classes by index.
+ + Annotations are written to the dex dump as human-readable blocks of text. + The only prescription is that they appear before the class in our xml file. + They are not required to be nested within the package like our classes are, + so it is simpler to parse all the annotations and then associate them + back to the classes. + + Example: + Class #12 annotations: + Annotations on class + VISIBILITY_RUNTIME Ldalvik/annotation/EnclosingClass; value=... + Annotations on method #512 'example' + VISIBILITY_SYSTEM Ldalvik/annotation/Signature; value=... + VISIBILITY_RUNTIME Landroidx/test/filters/SmallTest; + VISIBILITY_RUNTIME Lorg/chromium/base/test/util/Feature; value={ Cronet } + VISIBILITY_RUNTIME LFoo; key1={ A B } key2=4104 key3=null + """ + + # We want to find the lines matching the annotations header pattern + # Eg: Class #12 annotations -> true + annotationsBlockMatcher = re.compile(u'^Class #.*annotations:$') + # We want to retrieve the index of the class + # Eg: Class #12 annotations -> 12 + classIndexMatcher = re.compile(u'(?<=#)[0-9]*') + # We want to retrieve the method name from between the quotes + # of the annotations line + # Eg: Annotations on method #512 'example' -> example + methodMatcher = re.compile(u"(?<=')[^']*") + # We want to match everything after the last slash until before the semicolon + # Eg: Ldalvik/annotation/Signature; -> Signature + annotationMatcher = re.compile(u'([^/]+); ?(.*)?') + + annotations = {} + currentAnnotationsForClass = None + currentAnnotationsBlock: Dict[str, None] = None + + # This loop does four things: + # 1. It looks for a line telling us we are describing annotations for + # a new class + # 2. It looks for a line telling us if the annotations we find will be + # for the class or for any of its methods; we will keep a reference to + # this + # 3. It adds the annotations to whatever we are holding a reference to + # 4.
It looks for a line to see if we should start looking for a + # new class again + for line in dexRaw.splitlines(): + if currentAnnotationsForClass is None: + # Step 1 + # We keep searching until we find an annotation descriptor + # This lets us know that we are storing annotations for a new class + if annotationsBlockMatcher.match(line): + currentClassIndex = int(classIndexMatcher.findall(line)[0]) + currentAnnotationsForClass = Annotations(classAnnotations={}, + methodsAnnotations={}) + annotations[currentClassIndex] = currentAnnotationsForClass + else: + # Step 2 + # If we find a descriptor indicating we are tracking annotations + # for the class or its methods, we'll keep a reference to this + # block for when we start finding annotation references + if line.startswith(u'Annotations on class'): + currentAnnotationsBlock = currentAnnotationsForClass.classAnnotations + elif line.startswith(u'Annotations on method'): + method = methodMatcher.findall(line)[0] + currentAnnotationsBlock = {} + currentAnnotationsForClass.methodsAnnotations[ + method] = currentAnnotationsBlock + + # If we match against any other type of annotations + # we will ignore them + elif line.startswith(u'Annotations on'): + currentAnnotationsBlock = None + + # Step 3 + # We are only adding runtime annotations as those are the types + # that will affect whether we should run tests or not (where this is + # being used) + elif currentAnnotationsBlock is not None and line.strip().startswith( + 'VISIBILITY_RUNTIME'): + annotationName, annotationValuesStr = annotationMatcher.findall(line)[0] + annotationValues = _ParseAnnotationValues(annotationValuesStr) + + # Our instrumentation tests expect a mapping of "Annotation: Value"; + # annotations that carry no values are simply mapped to None here. + currentAnnotationsBlock.update({annotationName: annotationValues}) + + # Step 4 + # Empty lines indicate that the annotation descriptions are complete + # and we should look for new classes + elif not line.strip(): + currentAnnotationsForClass = None + currentAnnotationsBlock = None + + return annotations + + +def _ParseRootNode(root, annotations: Dict[int, Annotations]): """Parses the XML output of dexdump. This output is in the following format. This is a subset of the information contained within dexdump output. @@ -86,10 +234,17 @@ def _ParseRootNode(root): """ results = {} + + # Annotations are referenced by the class order + # To match them, we need to keep track of the class number and + # match it to the appropriate annotation at that stage + classCount = 0 + for child in root: if child.tag == 'package': package_name = child.attrib['name'] - parsed_node = _ParsePackageNode(child) + parsed_node, classCount = _ParsePackageNode(child, classCount, + annotations) if package_name in results: results[package_name]['classes'].update(parsed_node['classes']) else: @@ -97,40 +252,62 @@ def _ParseRootNode(root): return results -def _ParsePackageNode(package_node): +def _ParsePackageNode(package_node, classCount: int, + annotations: Dict[int, Annotations]): """Parses a <package> node from the dexdump xml output.
Returns: - A dict in the format: - { + A tuple in the format: + (classes: { 'classes': { <class_name>: { - 'methods': [<method_1>, <method_2>] + 'methods': [<method_1>, <method_2>], + 'superclass': <superclass_name>, + 'is_abstract': <is_abstract>, + 'annotations': <annotations> }, <class_name>: { - 'methods': [<method_1>, <method_2>] + 'methods': [<method_1>, <method_2>], + 'superclass': <superclass_name>, + 'is_abstract': <is_abstract>, + 'annotations': <annotations> }, } - } + }, classCount: number) """ classes = {} for child in package_node: if child.tag == 'class': - classes[child.attrib['name']] = _ParseClassNode(child) - return {'classes': classes} + classes[child.attrib['name']] = _ParseClassNode(child, classCount, + annotations) + classCount += 1 + return ({'classes': classes}, classCount) -def _ParseClassNode(class_node): +def _ParseClassNode(class_node, classIndex: int, + annotations: Dict[int, Annotations]): """Parses a <class> node from the dexdump xml output. Returns: A dict in the format: { - 'methods': [<method_1>, <method_2>] + 'methods': [<method_1>, <method_2>], + 'superclass': <superclass_name>, + 'is_abstract': <is_abstract> } """ methods = [] for child in class_node: - if child.tag == 'method': + if child.tag == 'method' and child.attrib['visibility'] == 'public': methods.append(child.attrib['name']) - return {'methods': methods, 'superclass': class_node.attrib['extends']} + return { 'methods': + methods, 'superclass': + class_node.attrib['extends'], 'is_abstract': + class_node.attrib.get('abstract') == 'true', 'annotations': + annotations.get(classIndex, + Annotations(classAnnotations={}, methodsAnnotations={})) + } diff --git a/build/android/pylib/utils/dexdump_test.py b/build/android/pylib/utils/dexdump_test.py index 3197853fc000..2b7c72866db8 100755 --- a/build/android/pylib/utils/dexdump_test.py +++ b/build/android/pylib/utils/dexdump_test.py @@ -1,5 +1,5 @@ -#! /usr/bin/env vpython -# Copyright 2016 The Chromium Authors. All rights reserved. +#! /usr/bin/env vpython3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file.
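To make the value-matcher regex above concrete, here is a small sketch of how it tokenizes a dexdump annotation-values string; the input is invented, mirroring the shapes the tests below exercise.

import re

# Same pattern as dexdump._ANNOTATION_VALUE_MATCHER: each "key=value" token
# runs until end-of-string or until just before the next " key=".
matcher = re.compile(r'\w+=.*?(?:$|(?= \w+=))')

print(matcher.findall('A=B x B={ C D } C=D'))
# -> ['A=B x', 'B={ C D }', 'C=D']
# _ParseAnnotationValues() then splits each token on the first '=' and
# converts '{ ... }' payloads into lists, yielding:
#   {'A': 'B x', 'B': ['C', 'D'], 'C': 'D'}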
@@ -10,131 +10,197 @@ # pylint: disable=protected-access +emptyAnnotations = dexdump.Annotations(classAnnotations={}, + methodsAnnotations={}) + class DexdumpXMLParseTest(unittest.TestCase): - def testParseRootXmlNode(self): + def testParseAnnotations(self): example_xml_string = ( - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' - '') + '\n' + 'Class #1 annotations:\n' + 'Annotations on class\n' + ' VISIBILITY_RUNTIME Ldalvik/annotation/AppModeFull; value=Alpha\n' + 'Annotations on method #512 \'example\'\n' + ' VISIBILITY_SYSTEM Ldalvik/annotation/Signature; value=Bravo\n' + ' VISIBILITY_RUNTIME Ldalvik/annotation/Test;\n' + ' VISIBILITY_RUNTIME Ldalvik/annotation/Test2; value=Charlie\n' + ' VISIBILITY_RUNTIME Ldalvik/annotation/Test3; A=B x B={ C D }\n' + ' VISIBILITY_RUNTIME Ldalvik/annotation/Test4; A=B x B={ C D } C=D\n' + '\n' + '\n' + '\n' + '\n' + '\n') + + actual = dexdump._ParseAnnotations(example_xml_string) - actual = dexdump._ParseRootNode( - ElementTree.fromstring(example_xml_string)) + expected = { + 1: + dexdump.Annotations( + classAnnotations={'AppModeFull': { + 'value': 'Alpha' + }}, + methodsAnnotations={ + 'example': { + 'Test': None, + 'Test2': { + 'value': 'Charlie' + }, + 'Test3': { + 'A': 'B x', + 'B': ['C', 'D'] + }, + 'Test4': { + 'A': 'B x', + 'B': ['C', 'D'], + 'C': 'D' + }, + } + }, + ) + } + + self.assertEqual(expected, actual) + + def testParseRootXmlNode(self): + example_xml_string = ('' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '') + + actual = dexdump._ParseRootNode(ElementTree.fromstring(example_xml_string), + {}) expected = { - 'com.foo.bar1' : { - 'classes': { - 'Class1': { - 'methods': ['class1Method1', 'class1Method2'], - 'superclass': 'java.lang.Object', - }, - 'Class2': { - 'methods': ['class2Method1'], - 'superclass': 'java.lang.Object', - } + 'com.foo.bar1': { + 'classes': { + 'Class1': { + 'methods': ['class1Method1', 'class1Method2'], + 'superclass': 'java.lang.Object', + 'is_abstract': False, + 'annotations': emptyAnnotations, + }, + 'Class2': { + 'methods': ['class2Method1'], + 'superclass': 'java.lang.Object', + 'is_abstract': True, + 'annotations': emptyAnnotations, + } + }, + }, + 'com.foo.bar2': { + 'classes': {} + }, + 'com.foo.bar3': { + 'classes': {} }, - }, - 'com.foo.bar2' : {'classes': {}}, - 'com.foo.bar3' : {'classes': {}}, } - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) def testParsePackageNode(self): example_xml_string = ( '' '' '' - '' + '' '' '') - actual = dexdump._ParsePackageNode( - ElementTree.fromstring(example_xml_string)) + (actual, classCount) = dexdump._ParsePackageNode( + ElementTree.fromstring(example_xml_string), 0, {}) expected = { - 'classes': { - 'Class1': { - 'methods': [], - 'superclass': 'java.lang.Object', - }, - 'Class2': { - 'methods': [], - 'superclass': 'java.lang.Object', + 'classes': { + 'Class1': { + 'methods': [], + 'superclass': 'java.lang.Object', + 'is_abstract': False, + 'annotations': emptyAnnotations, + }, + 'Class2': { + 'methods': [], + 'superclass': 'java.lang.Object', + 'is_abstract': True, + 'annotations': emptyAnnotations, + }, }, - }, } - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) + self.assertEqual(classCount, 2) def testParseClassNode(self): - example_xml_string = ( - '' - '' - '' - '' - '' - '') - - actual = dexdump._ParseClassNode( - ElementTree.fromstring(example_xml_string)) + example_xml_string = ('' + '' + '' + '' + '' + '' + '' + '') + + actual = 
dexdump._ParseClassNode(ElementTree.fromstring(example_xml_string), + 0, {}) expected = { - 'methods': ['method1', 'method2'], - 'superclass': 'java.lang.Object', + 'methods': ['method1', 'method2'], + 'superclass': 'java.lang.Object', + 'is_abstract': False, + 'annotations': emptyAnnotations, } - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) if __name__ == '__main__': diff --git a/build/android/pylib/utils/gold_utils.py b/build/android/pylib/utils/gold_utils.py index 0b79a6d7cbc4..9dc9fe3e0415 100644 --- a/build/android/pylib/utils/gold_utils.py +++ b/build/android/pylib/utils/gold_utils.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """//build/android implementations of //testing/skia_gold_common. @@ -74,5 +74,5 @@ def GetSessionClass(): class AndroidSkiaGoldProperties(skia_gold_properties.SkiaGoldProperties): @staticmethod - def _GetGitOriginMasterHeadSha1(): - return repo_utils.GetGitOriginMasterHeadSHA1(host_paths.DIR_SOURCE_ROOT) + def _GetGitOriginMainHeadSha1(): + return repo_utils.GetGitOriginMainHeadSHA1(host_paths.DIR_SOURCE_ROOT) diff --git a/build/android/pylib/utils/gold_utils_test.py b/build/android/pylib/utils/gold_utils_test.py index 2d3cc5c7c2db..8a9f8a37aa8d 100755 --- a/build/android/pylib/utils/gold_utils_test.py +++ b/build/android/pylib/utils/gold_utils_test.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2020 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Tests for gold_utils.""" @@ -66,14 +66,14 @@ def test_commandCommonArgs(self, cmd_mock): class AndroidSkiaGoldSessionDiffLinksTest(fake_filesystem_unittest.TestCase): - class FakeArchivedFile(object): + class FakeArchivedFile: def __init__(self, path): self.name = path def Link(self): return 'file://' + self.name - class FakeOutputManager(object): + class FakeOutputManager: def __init__(self): self.output_dir = tempfile.mkdtemp() diff --git a/build/android/pylib/utils/google_storage_helper.py b/build/android/pylib/utils/google_storage_helper.py index d184810517ae..27af7096259c 100644 --- a/build/android/pylib/utils/google_storage_helper.py +++ b/build/android/pylib/utils/google_storage_helper.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
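Stepping back from the parser internals, here is a short usage sketch of the annotation-aware dexdump.Dump() output added above; the APK path and annotation name are hypothetical, and the dict shape is the one documented in Dump().

from pylib.utils import dexdump

def classes_with_runtime_annotation(apk_path, annotation_name):
  # Dump() returns one parsed dict per classes*.dex file in the APK.
  matches = []
  for parsed_dex in dexdump.Dump(apk_path):
    for package_name, package in parsed_dex.items():
      for class_name, info in package['classes'].items():
        # 'annotations' is an Annotations namedtuple that holds only the
        # VISIBILITY_RUNTIME annotations, keyed by simple annotation name.
        if annotation_name in info['annotations'].classAnnotations:
          matches.append('%s.%s' % (package_name, class_name))
  return matches

# Hypothetical call:
#   classes_with_runtime_annotation('/tmp/Example.apk', 'SmallTest')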
@@ -13,7 +13,10 @@ import os import sys import time -import urlparse +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse from pylib.constants import host_paths from pylib.utils import decorators @@ -22,9 +25,8 @@ sys.path.append(host_paths.DEVIL_PATH) from devil.utils import cmd_helper -_GSUTIL_PATH = os.path.join( - host_paths.DIR_SOURCE_ROOT, 'third_party', 'catapult', - 'third_party', 'gsutil', 'gsutil.py') +_GSUTIL_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', + 'catapult', 'third_party', 'gsutil', 'gsutil') _PUBLIC_URL = 'https://storage.googleapis.com/%s/' _AUTHENTICATED_URL = 'https://storage.cloud.google.com/%s/' @@ -67,7 +69,7 @@ def upload(name, filepath, bucket, gs_args=None, command_args=None, def read_from_link(link): # Note that urlparse returns the path with an initial '/', so we only need to # add one more after the 'gs;' - gs_path = 'gs:/%s' % urlparse.urlparse(link).path + gs_path = 'gs:/%s' % urlparse(link).path cmd = [_GSUTIL_PATH, '-q', 'cat', gs_path] return cmd_helper.GetCmdOutput(cmd) diff --git a/build/android/pylib/utils/instrumentation_tracing.py b/build/android/pylib/utils/instrumentation_tracing.py index f1d03a0dcf8e..3c9304e90915 100644 --- a/build/android/pylib/utils/instrumentation_tracing.py +++ b/build/android/pylib/utils/instrumentation_tracing.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -28,7 +28,8 @@ # Modules to exclude by default (to avoid problems like infinite loops) DEFAULT_EXCLUDE = [r'py_trace_event\..*'] -class _TraceArguments(object): + +class _TraceArguments: def __init__(self): """Wraps a dictionary to ensure safe evaluation of repr().""" self._arguments = {} @@ -75,7 +76,7 @@ def _shouldTrace(frame, to_include, to_exclude, included, excluded): if module_name in included: includes = True elif to_include: - includes = any([pattern.match(module_name) for pattern in to_include]) + includes = any(pattern.match(module_name) for pattern in to_include) else: includes = True @@ -161,6 +162,7 @@ def traceFunction(frame, event, arg): if event == "return": trace_event.trace_end(function_name) return None + return None return traceFunction diff --git a/build/android/pylib/utils/local_utils.py b/build/android/pylib/utils/local_utils.py index 027cca392507..a7d39d6a2305 100644 --- a/build/android/pylib/utils/local_utils.py +++ b/build/android/pylib/utils/local_utils.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Utilities for determining if a test is being run locally or not.""" diff --git a/build/android/pylib/utils/logdog_helper.py b/build/android/pylib/utils/logdog_helper.py index 68a7ba57ab7a..e1562f51522c 100644 --- a/build/android/pylib/utils/logdog_helper.py +++ b/build/android/pylib/utils/logdog_helper.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
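A quick illustration of the path arithmetic in read_from_link() above; the bucket and object names are made up.

from urllib.parse import urlparse

link = 'https://storage.googleapis.com/my-bucket/logs/output.txt'
path = urlparse(link).path   # '/my-bucket/logs/output.txt' (leading '/')
gs_path = 'gs:/%s' % path    # 'gs://my-bucket/logs/output.txt'
# The single '/' in 'gs:/' plus the leading '/' from urlparse forms 'gs://'.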
@@ -11,9 +11,11 @@ from pylib import constants from pylib.utils import decorators -sys.path.insert(0, os.path.abspath(os.path.join( - constants.DIR_SOURCE_ROOT, 'tools', 'swarming_client'))) -from libs.logdog import bootstrap # pylint: disable=import-error +sys.path.insert( + 0, + os.path.abspath( + os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'logdog'))) +from logdog import bootstrap # pylint: disable=import-error @decorators.NoRaiseException(default_return_value='', diff --git a/build/android/pylib/utils/logging_utils.py b/build/android/pylib/utils/logging_utils.py index 9c4eae3fcb90..fdb0fa6a8b42 100644 --- a/build/android/pylib/utils/logging_utils.py +++ b/build/android/pylib/utils/logging_utils.py @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -24,6 +24,7 @@ class _ColorFormatter(logging.Formatter): # pylint: disable=no-member color_map = { logging.DEBUG: (FORE.CYAN), + logging.INFO: (), # Use default style. logging.WARNING: (FORE.YELLOW), logging.ERROR: (FORE.RED), logging.CRITICAL: (BACK.RED), @@ -31,7 +32,7 @@ class _ColorFormatter(logging.Formatter): def __init__(self, wrapped_formatter=None): """Wraps a |logging.Formatter| and adds color.""" - super(_ColorFormatter, self).__init__(self) + super().__init__() self._wrapped_formatter = wrapped_formatter or logging.Formatter() #override @@ -63,24 +64,27 @@ class ColorStreamHandler(logging.StreamHandler): """ def __init__(self, force_color=False): - super(ColorStreamHandler, self).__init__() + super().__init__() self.force_color = force_color self.setFormatter(logging.Formatter()) @property def is_tty(self): - isatty = getattr(self.stream, 'isatty', None) - return isatty and isatty() + try: + isatty = getattr(self.stream, 'isatty') + except AttributeError: + return False + return isatty() #override - def setFormatter(self, formatter): + def setFormatter(self, fmt): if self.force_color or self.is_tty: - formatter = _ColorFormatter(formatter) - super(ColorStreamHandler, self).setFormatter(formatter) + fmt = _ColorFormatter(fmt) + super().setFormatter(fmt) @staticmethod def MakeDefault(force_color=False): - """ + """ Replaces the default logging handlers with a coloring handler. To use a colorizing handler at the same time as others, either register them after this call, or add the ColorStreamHandler on the logger using @@ -89,9 +93,9 @@ def MakeDefault(force_color=False): Args: force_color: Set to True to bypass the tty check and always colorize. """ - # If the existing handlers aren't removed, messages are duplicated - logging.getLogger().handlers = [] - logging.getLogger().addHandler(ColorStreamHandler(force_color)) + # If the existing handlers aren't removed, messages are duplicated + logging.getLogger().handlers = [] + logging.getLogger().addHandler(ColorStreamHandler(force_color)) @contextlib.contextmanager @@ -110,7 +114,7 @@ def OverrideColor(level, color): try: yield finally: - for formatter, prev_color in prev_colors.iteritems(): + for formatter, prev_color in prev_colors.items(): formatter.color_map[level] = prev_color diff --git a/build/android/pylib/utils/maven_downloader.py b/build/android/pylib/utils/maven_downloader.py index 1dc1542ea19f..fd9d97304f1d 100755 --- a/build/android/pylib/utils/maven_downloader.py +++ b/build/android/pylib/utils/maven_downloader.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2017 The Chromium Authors. 
All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -23,7 +23,7 @@ def _MakeDirsIfAbsent(path): raise -class MavenDownloader(object): +class MavenDownloader: ''' Downloads and installs the requested artifacts from the Google Maven repo. The artifacts are expected to be specified in the format @@ -71,7 +71,7 @@ def debug(self): return self._debug -class _SingleArtifactDownloader(object): +class _SingleArtifactDownloader: '''Handles downloading and installing a single Maven artifact.''' _POM_FILE_TYPE = 'pom' @@ -121,8 +121,8 @@ def _DownloadArtifact(self, group_id, artifact_id, version, file_type): if ret_code != 0: raise Exception('Command "{}" failed'.format(' '.join(cmd))) except OSError as e: - if e.errno == os.errno.ENOENT: - raise Exception('mvn command not found. Please install Maven.') + if e.errno == errno.ENOENT: + raise Exception('mvn command not found. Please install Maven.') from e raise return os.path.join(os.path.join(*group_id.split('.')), diff --git a/build/android/pylib/utils/proguard.py b/build/android/pylib/utils/proguard.py deleted file mode 100644 index 9d5bae285a53..000000000000 --- a/build/android/pylib/utils/proguard.py +++ /dev/null @@ -1,285 +0,0 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import os -import re -import tempfile - -from devil.utils import cmd_helper -from pylib import constants - - -_PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$') -_PROGUARD_SUPERCLASS_RE = re.compile(r'\s*? Superclass:\s*([\S]+)$') -_PROGUARD_SECTION_RE = re.compile( - r'^(Interfaces|Constant Pool|Fields|Methods|Class file attributes) ' - r'\(count = \d+\):$') -_PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$') -_PROGUARD_ANNOTATION_RE = re.compile(r'^(\s*?)- Annotation \[L(\S*);\]:$') -_ELEMENT_PRIMITIVE = 0 -_ELEMENT_ARRAY = 1 -_ELEMENT_ANNOTATION = 2 -_PROGUARD_ELEMENT_RES = [ - (_ELEMENT_PRIMITIVE, - re.compile(r'^(\s*?)- Constant element value \[(\S*) .*\]$')), - (_ELEMENT_ARRAY, - re.compile(r'^(\s*?)- Array element value \[(\S*)\]:$')), - (_ELEMENT_ANNOTATION, - re.compile(r'^(\s*?)- Annotation element value \[(\S*)\]:$')) -] -_PROGUARD_INDENT_WIDTH = 2 -_PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'^(\s*?)- \S+? \[(.*)\]$') - - -def _GetProguardPath(): - return os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'proguard', - 'lib', 'proguard603.jar') - - -def Dump(jar_path): - """Dumps class and method information from a JAR into a dict via proguard. - - Args: - jar_path: An absolute path to the JAR file to dump. - Returns: - A dict in the following format: - { - 'classes': [ - { - 'class': '', - 'superclass': '', - 'annotations': {/* dict -- see below */}, - 'methods': [ - { - 'method': '', - 'annotations': {/* dict -- see below */}, - }, - ... - ], - }, - ... - ], - } - - Annotations dict format: - { - 'empty-annotation-class-name': None, - 'annotation-class-name': { - 'field': 'primitive-value', - 'field': [ 'array-item-1', 'array-item-2', ... ], - 'field': { - /* Object value */ - 'field': 'primitive-value', - 'field': [ 'array-item-1', 'array-item-2', ... ], - 'field': { /* Object value */ } - } - } - } - - Note that for top-level annotations their class names are used for - identification, whereas for any nested annotations the corresponding - field names are used. 
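[Note] On the maven_downloader hunk above: os.errno was an undocumented alias removed in modern Python 3, so the comparison now uses the errno module directly (assuming the file imports errno elsewhere). A minimal sketch of the corrected idiom, with a hypothetical command:

    import errno
    import subprocess

    def run_mvn(cmd):
        try:
            subprocess.check_call(cmd)
        except OSError as e:
            if e.errno == errno.ENOENT:
                # The binary itself is missing; chain the original error.
                raise Exception('mvn command not found. '
                                'Please install Maven.') from e
            raise
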
- - One drawback of this approach is that an array containing empty - annotation classes will be represented as an array of 'None' values, - thus it will not be possible to find out annotation class names. - On the other hand, storing both annotation class name and the field name - would produce a very complex JSON. - """ - - with tempfile.NamedTemporaryFile() as proguard_output: - cmd_helper.GetCmdStatusAndOutput([ - 'java', - '-jar', _GetProguardPath(), - '-injars', jar_path, - '-dontshrink', '-dontoptimize', '-dontobfuscate', '-dontpreverify', - '-dump', proguard_output.name]) - return Parse(proguard_output) - -class _AnnotationElement(object): - def __init__(self, name, ftype, depth): - self.ref = None - self.name = name - self.ftype = ftype - self.depth = depth - -class _ParseState(object): - _INITIAL_VALUES = (lambda: None, list, dict) - # Empty annotations are represented as 'None', not as an empty dictionary. - _LAZY_INITIAL_VALUES = (lambda: None, list, lambda: None) - - def __init__(self): - self._class_result = None - self._method_result = None - self._parse_annotations = False - self._annotation_stack = [] - - def ResetPerSection(self, section_name): - self.InitMethod(None) - self._parse_annotations = ( - section_name in ['Class file attributes', 'Methods']) - - def ParseAnnotations(self): - return self._parse_annotations - - def CreateAndInitClass(self, class_name): - self.InitMethod(None) - self._class_result = { - 'class': class_name, - 'superclass': '', - 'annotations': {}, - 'methods': [], - } - return self._class_result - - def HasCurrentClass(self): - return bool(self._class_result) - - def SetSuperClass(self, superclass): - assert self.HasCurrentClass() - self._class_result['superclass'] = superclass - - def InitMethod(self, method_name): - self._annotation_stack = [] - if method_name: - self._method_result = { - 'method': method_name, - 'annotations': {}, - } - self._class_result['methods'].append(self._method_result) - else: - self._method_result = None - - def InitAnnotation(self, annotation, depth): - if not self._annotation_stack: - # Add a fake parent element comprising 'annotations' dictionary, - # so we can work uniformly with both top-level and nested annotations. - annotations = _AnnotationElement( - '<<>>', _ELEMENT_ANNOTATION, depth - 1) - if self._method_result: - annotations.ref = self._method_result['annotations'] - else: - annotations.ref = self._class_result['annotations'] - self._annotation_stack = [annotations] - self._BacktrackAnnotationStack(depth) - if not self.HasCurrentAnnotation(): - self._annotation_stack.append( - _AnnotationElement(annotation, _ELEMENT_ANNOTATION, depth)) - self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES) - - def HasCurrentAnnotation(self): - return len(self._annotation_stack) > 1 - - def InitAnnotationField(self, field, field_type, depth): - self._BacktrackAnnotationStack(depth) - # Create the parent representation, if needed. E.g. annotations - # are represented with `None`, not with `{}` until they receive the first - # field. - self._CreateAnnotationPlaceHolder(self._INITIAL_VALUES) - if self._annotation_stack[-1].ftype == _ELEMENT_ARRAY: - # Nested arrays are not allowed in annotations. - assert not field_type == _ELEMENT_ARRAY - # Use array index instead of bogus field name. 
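[Note] The deleted _ParseState above keys everything off proguard's two-space indentation: a stack holds one frame per nesting level, and _BacktrackAnnotationStack pops frames at or below an incoming element's depth, so the top of the stack is always that element's parent. A toy illustration of the invariant:

    def backtrack(stack, depth):
        # Pop siblings and anything deeper; afterwards stack[-1] (if any)
        # is the parent of an element at the given depth.
        while stack and stack[-1][1] >= depth:
            stack.pop()

    stack = [('<<annotations>>', 0), ('outerArr', 1), ('item', 2)]
    backtrack(stack, 2)  # a new depth-2 element replaces its sibling
    assert stack == [('<<annotations>>', 0), ('outerArr', 1)]
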
- field = len(self._annotation_stack[-1].ref) - self._annotation_stack.append(_AnnotationElement(field, field_type, depth)) - self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES) - - def UpdateCurrentAnnotationFieldValue(self, value, depth): - self._BacktrackAnnotationStack(depth) - self._InitOrUpdateCurrentField(value) - - def _CreateAnnotationPlaceHolder(self, constructors): - assert self.HasCurrentAnnotation() - field = self._annotation_stack[-1] - if field.ref is None: - field.ref = constructors[field.ftype]() - self._InitOrUpdateCurrentField(field.ref) - - def _BacktrackAnnotationStack(self, depth): - stack = self._annotation_stack - while len(stack) > 0 and stack[-1].depth >= depth: - stack.pop() - - def _InitOrUpdateCurrentField(self, value): - assert self.HasCurrentAnnotation() - parent = self._annotation_stack[-2] - assert not parent.ref is None - # There can be no nested constant element values. - assert parent.ftype in [_ELEMENT_ARRAY, _ELEMENT_ANNOTATION] - field = self._annotation_stack[-1] - if isinstance(value, str) and not field.ftype == _ELEMENT_PRIMITIVE: - # The value comes from the output parser via - # UpdateCurrentAnnotationFieldValue, and should be a value of a constant - # element. If it isn't, just skip it. - return - if parent.ftype == _ELEMENT_ARRAY and field.name >= len(parent.ref): - parent.ref.append(value) - else: - parent.ref[field.name] = value - - -def _GetDepth(prefix): - return len(prefix) // _PROGUARD_INDENT_WIDTH - -def Parse(proguard_output): - results = { - 'classes': [], - } - - state = _ParseState() - - for line in proguard_output: - line = line.strip('\r\n') - - m = _PROGUARD_CLASS_RE.match(line) - if m: - results['classes'].append( - state.CreateAndInitClass(m.group(1).replace('/', '.'))) - continue - - if not state.HasCurrentClass(): - continue - - m = _PROGUARD_SUPERCLASS_RE.match(line) - if m: - state.SetSuperClass(m.group(1).replace('/', '.')) - continue - - m = _PROGUARD_SECTION_RE.match(line) - if m: - state.ResetPerSection(m.group(1)) - continue - - m = _PROGUARD_METHOD_RE.match(line) - if m: - state.InitMethod(m.group(1)) - continue - - if not state.ParseAnnotations(): - continue - - m = _PROGUARD_ANNOTATION_RE.match(line) - if m: - # Ignore the annotation package. - state.InitAnnotation(m.group(2).split('/')[-1], _GetDepth(m.group(1))) - continue - - if state.HasCurrentAnnotation(): - m = None - for (element_type, element_re) in _PROGUARD_ELEMENT_RES: - m = element_re.match(line) - if m: - state.InitAnnotationField( - m.group(2), element_type, _GetDepth(m.group(1))) - break - if m: - continue - m = _PROGUARD_ANNOTATION_VALUE_RE.match(line) - if m: - state.UpdateCurrentAnnotationFieldValue( - m.group(2), _GetDepth(m.group(1))) - else: - state.InitMethod(None) - - return results diff --git a/build/android/pylib/utils/proguard_test.py b/build/android/pylib/utils/proguard_test.py deleted file mode 100755 index b11c299580e0..000000000000 --- a/build/android/pylib/utils/proguard_test.py +++ /dev/null @@ -1,495 +0,0 @@ -#! /usr/bin/env vpython -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
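[Note] For reference while reading the deleted tests below, this is the dict shape proguard.Parse produced for a simple annotated class (values lifted from testClassAnnotation):

    expected_shape = {
        'classes': [{
            'class': 'org.example.Test',
            'superclass': '',  # filled in when a Superclass line is seen
            'annotations': {
                'Annotation': None,  # empty annotations parse to None, not {}
                'AnnotationWithValue': {'attr': 'val'},
                'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'},
            },
            'methods': [],
        }],
    }
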
- -import unittest - -from pylib.utils import proguard - -class TestParse(unittest.TestCase): - - def setUp(self): - self.maxDiff = None - - def testClass(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - ' Superclass: java/lang/Object']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': 'java.lang.Object', - 'annotations': {}, - 'methods': [] - } - ] - } - self.assertEquals(expected, actual) - - def testMethod(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Methods (count = 1):', - '- Method: ()V']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': {}, - 'methods': [ - { - 'method': '', - 'annotations': {} - } - ] - } - ] - } - self.assertEquals(expected, actual) - - def testClassAnnotation(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Class file attributes (count = 3):', - ' - Annotation [Lorg/example/Annotation;]:', - ' - Annotation [Lorg/example/AnnotationWithValue;]:', - ' - Constant element value [attr \'13\']', - ' - Utf8 [val]', - ' - Annotation [Lorg/example/AnnotationWithTwoValues;]:', - ' - Constant element value [attr1 \'13\']', - ' - Utf8 [val1]', - ' - Constant element value [attr2 \'13\']', - ' - Utf8 [val2]']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': { - 'Annotation': None, - 'AnnotationWithValue': {'attr': 'val'}, - 'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'} - }, - 'methods': [] - } - ] - } - self.assertEquals(expected, actual) - - def testClassAnnotationWithArrays(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Class file attributes (count = 3):', - ' - Annotation [Lorg/example/AnnotationWithEmptyArray;]:', - ' - Array element value [arrayAttr]:', - ' - Annotation [Lorg/example/AnnotationWithOneElemArray;]:', - ' - Array element value [arrayAttr]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [val]', - ' - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:', - ' - Array element value [arrayAttr]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [val1]', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [val2]']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': { - 'AnnotationWithEmptyArray': {'arrayAttr': []}, - 'AnnotationWithOneElemArray': {'arrayAttr': ['val']}, - 'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']} - }, - 'methods': [] - } - ] - } - self.assertEquals(expected, actual) - - def testNestedClassAnnotations(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Class file attributes (count = 1):', - ' - Annotation [Lorg/example/OuterAnnotation;]:', - ' - Constant element value [outerAttr \'13\']', - ' - Utf8 [outerVal]', - ' - Array element value [outerArr]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [outerArrVal1]', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [outerArrVal2]', - ' - Annotation element value [emptyAnn]:', - ' - Annotation [Lorg/example/EmptyAnnotation;]:', - ' - Annotation element value [ann]:', - ' - Annotation [Lorg/example/InnerAnnotation;]:', - ' - Constant element value [innerAttr \'13\']', - ' - Utf8 [innerVal]', - ' - Array element value [innerArr]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [innerArrVal1]', - ' - Constant element value [(default) \'13\']', - ' - Utf8 
[innerArrVal2]', - ' - Annotation element value [emptyInnerAnn]:', - ' - Annotation [Lorg/example/EmptyAnnotation;]:']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': { - 'OuterAnnotation': { - 'outerAttr': 'outerVal', - 'outerArr': ['outerArrVal1', 'outerArrVal2'], - 'emptyAnn': None, - 'ann': { - 'innerAttr': 'innerVal', - 'innerArr': ['innerArrVal1', 'innerArrVal2'], - 'emptyInnerAnn': None - } - } - }, - 'methods': [] - } - ] - } - self.assertEquals(expected, actual) - - def testClassArraysOfAnnotations(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Class file attributes (count = 1):', - ' - Annotation [Lorg/example/OuterAnnotation;]:', - ' - Array element value [arrayWithEmptyAnnotations]:', - ' - Annotation element value [(default)]:', - ' - Annotation [Lorg/example/EmptyAnnotation;]:', - ' - Annotation element value [(default)]:', - ' - Annotation [Lorg/example/EmptyAnnotation;]:', - ' - Array element value [outerArray]:', - ' - Annotation element value [(default)]:', - ' - Annotation [Lorg/example/InnerAnnotation;]:', - ' - Constant element value [innerAttr \'115\']', - ' - Utf8 [innerVal]', - ' - Array element value [arguments]:', - ' - Annotation element value [(default)]:', - ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:', - ' - Constant element value [arg1Attr \'115\']', - ' - Utf8 [arg1Val]', - ' - Array element value [arg1Array]:', - ' - Constant element value [(default) \'73\']', - ' - Integer [11]', - ' - Constant element value [(default) \'73\']', - ' - Integer [12]', - ' - Annotation element value [(default)]:', - ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:', - ' - Constant element value [arg2Attr \'115\']', - ' - Utf8 [arg2Val]', - ' - Array element value [arg2Array]:', - ' - Constant element value [(default) \'73\']', - ' - Integer [21]', - ' - Constant element value [(default) \'73\']', - ' - Integer [22]']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': { - 'OuterAnnotation': { - 'arrayWithEmptyAnnotations': [None, None], - 'outerArray': [ - { - 'innerAttr': 'innerVal', - 'arguments': [ - {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']}, - {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']} - ] - } - ] - } - }, - 'methods': [] - } - ] - } - self.assertEquals(expected, actual) - - def testReadFullClassFileAttributes(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Class file attributes (count = 3):', - ' - Source file attribute:', - ' - Utf8 [Class.java]', - ' - Runtime visible annotations attribute:', - ' - Annotation [Lorg/example/IntValueAnnotation;]:', - ' - Constant element value [value \'73\']', - ' - Integer [19]', - ' - Inner classes attribute (count = 1)', - ' - InnerClassesInfo:', - ' Access flags: 0x9 = public static', - ' - Class [org/example/Class1]', - ' - Class [org/example/Class2]', - ' - Utf8 [OnPageFinishedHelper]']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': { - 'IntValueAnnotation': { - 'value': '19', - } - }, - 'methods': [] - } - ] - } - self.assertEquals(expected, actual) - - def testMethodAnnotation(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Methods (count = 1):', - '- Method: Test()V', - ' - Annotation [Lorg/example/Annotation;]:', - ' - Annotation [Lorg/example/AnnotationWithValue;]:', - ' - Constant element value [attr \'13\']', - ' - Utf8 [val]', - ' - 
Annotation [Lorg/example/AnnotationWithTwoValues;]:', - ' - Constant element value [attr1 \'13\']', - ' - Utf8 [val1]', - ' - Constant element value [attr2 \'13\']', - ' - Utf8 [val2]']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': {}, - 'methods': [ - { - 'method': 'Test', - 'annotations': { - 'Annotation': None, - 'AnnotationWithValue': {'attr': 'val'}, - 'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'} - }, - } - ] - } - ] - } - self.assertEquals(expected, actual) - - def testMethodAnnotationWithArrays(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Methods (count = 1):', - '- Method: Test()V', - ' - Annotation [Lorg/example/AnnotationWithEmptyArray;]:', - ' - Array element value [arrayAttr]:', - ' - Annotation [Lorg/example/AnnotationWithOneElemArray;]:', - ' - Array element value [arrayAttr]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [val]', - ' - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:', - ' - Array element value [arrayAttr]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [val1]', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [val2]']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': {}, - 'methods': [ - { - 'method': 'Test', - 'annotations': { - 'AnnotationWithEmptyArray': {'arrayAttr': []}, - 'AnnotationWithOneElemArray': {'arrayAttr': ['val']}, - 'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']} - }, - } - ] - } - ] - } - self.assertEquals(expected, actual) - - def testMethodAnnotationWithPrimitivesAndArrays(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Methods (count = 1):', - '- Method: Test()V', - ' - Annotation [Lorg/example/AnnotationPrimitiveThenArray;]:', - ' - Constant element value [attr \'13\']', - ' - Utf8 [val]', - ' - Array element value [arrayAttr]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [val]', - ' - Annotation [Lorg/example/AnnotationArrayThenPrimitive;]:', - ' - Array element value [arrayAttr]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [val]', - ' - Constant element value [attr \'13\']', - ' - Utf8 [val]', - ' - Annotation [Lorg/example/AnnotationTwoArrays;]:', - ' - Array element value [arrayAttr1]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [val1]', - ' - Array element value [arrayAttr2]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [val2]']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': {}, - 'methods': [ - { - 'method': 'Test', - 'annotations': { - 'AnnotationPrimitiveThenArray': {'attr': 'val', - 'arrayAttr': ['val']}, - 'AnnotationArrayThenPrimitive': {'arrayAttr': ['val'], - 'attr': 'val'}, - 'AnnotationTwoArrays': {'arrayAttr1': ['val1'], - 'arrayAttr2': ['val2']} - }, - } - ] - } - ] - } - self.assertEquals(expected, actual) - - def testNestedMethodAnnotations(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Methods (count = 1):', - '- Method: Test()V', - ' - Annotation [Lorg/example/OuterAnnotation;]:', - ' - Constant element value [outerAttr \'13\']', - ' - Utf8 [outerVal]', - ' - Array element value [outerArr]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [outerArrVal1]', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [outerArrVal2]', - ' - Annotation element value [emptyAnn]:', - ' - Annotation 
[Lorg/example/EmptyAnnotation;]:', - ' - Annotation element value [ann]:', - ' - Annotation [Lorg/example/InnerAnnotation;]:', - ' - Constant element value [innerAttr \'13\']', - ' - Utf8 [innerVal]', - ' - Array element value [innerArr]:', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [innerArrVal1]', - ' - Constant element value [(default) \'13\']', - ' - Utf8 [innerArrVal2]', - ' - Annotation element value [emptyInnerAnn]:', - ' - Annotation [Lorg/example/EmptyAnnotation;]:']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': {}, - 'methods': [ - { - 'method': 'Test', - 'annotations': { - 'OuterAnnotation': { - 'outerAttr': 'outerVal', - 'outerArr': ['outerArrVal1', 'outerArrVal2'], - 'emptyAnn': None, - 'ann': { - 'innerAttr': 'innerVal', - 'innerArr': ['innerArrVal1', 'innerArrVal2'], - 'emptyInnerAnn': None - } - } - }, - } - ] - } - ] - } - self.assertEquals(expected, actual) - - def testMethodArraysOfAnnotations(self): - actual = proguard.Parse( - ['- Program class: org/example/Test', - 'Methods (count = 1):', - '- Method: Test()V', - ' - Annotation [Lorg/example/OuterAnnotation;]:', - ' - Array element value [arrayWithEmptyAnnotations]:', - ' - Annotation element value [(default)]:', - ' - Annotation [Lorg/example/EmptyAnnotation;]:', - ' - Annotation element value [(default)]:', - ' - Annotation [Lorg/example/EmptyAnnotation;]:', - ' - Array element value [outerArray]:', - ' - Annotation element value [(default)]:', - ' - Annotation [Lorg/example/InnerAnnotation;]:', - ' - Constant element value [innerAttr \'115\']', - ' - Utf8 [innerVal]', - ' - Array element value [arguments]:', - ' - Annotation element value [(default)]:', - ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:', - ' - Constant element value [arg1Attr \'115\']', - ' - Utf8 [arg1Val]', - ' - Array element value [arg1Array]:', - ' - Constant element value [(default) \'73\']', - ' - Integer [11]', - ' - Constant element value [(default) \'73\']', - ' - Integer [12]', - ' - Annotation element value [(default)]:', - ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:', - ' - Constant element value [arg2Attr \'115\']', - ' - Utf8 [arg2Val]', - ' - Array element value [arg2Array]:', - ' - Constant element value [(default) \'73\']', - ' - Integer [21]', - ' - Constant element value [(default) \'73\']', - ' - Integer [22]']) - expected = { - 'classes': [ - { - 'class': 'org.example.Test', - 'superclass': '', - 'annotations': {}, - 'methods': [ - { - 'method': 'Test', - 'annotations': { - 'OuterAnnotation': { - 'arrayWithEmptyAnnotations': [None, None], - 'outerArray': [ - { - 'innerAttr': 'innerVal', - 'arguments': [ - {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']}, - {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']} - ] - } - ] - } - } - } - ] - } - ] - } - self.assertEquals(expected, actual) - - -if __name__ == '__main__': - unittest.main() diff --git a/build/android/pylib/utils/repo_utils.py b/build/android/pylib/utils/repo_utils.py index f9d300a2145c..4e1b7a57c3d6 100644 --- a/build/android/pylib/utils/repo_utils.py +++ b/build/android/pylib/utils/repo_utils.py @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -20,3 +20,9 @@ def GetGitOriginMasterHeadSHA1(in_directory): command_line = ['git', 'rev-parse', 'origin/master'] output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory) return output.strip() + + +def GetGitOriginMainHeadSHA1(in_directory): + command_line = ['git', 'rev-parse', 'origin/main'] + output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory) + return output.strip() diff --git a/build/android/pylib/utils/shared_preference_utils.py b/build/android/pylib/utils/shared_preference_utils.py index ae0d31b78439..93324c6b80f8 100644 --- a/build/android/pylib/utils/shared_preference_utils.py +++ b/build/android/pylib/utils/shared_preference_utils.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -19,12 +19,19 @@ def UnicodeToStr(data): strings. """ if isinstance(data, dict): - return {UnicodeToStr(key): UnicodeToStr(value) - for key, value in data.iteritems()} - elif isinstance(data, list): + return { + UnicodeToStr(key): UnicodeToStr(value) + for key, value in data.items() + } + if isinstance(data, list): return [UnicodeToStr(element) for element in data] - elif isinstance(data, unicode): - return data.encode('utf-8') + try: + # Python-2 compatibility. + if isinstance(data, unicode): + return data.encode('utf-8') + except NameError: + # Strings are already unicode in python3. + pass return data @@ -80,16 +87,30 @@ def ApplySharedPreferenceSetting(shared_pref, setting): shared_pref.Remove(key) except KeyError: logging.warning("Attempted to remove non-existent key %s", key) - for key, value in setting.get('set', {}).iteritems(): - if isinstance(value, bool): + for key, value in setting.get('set', {}).items(): + is_set = False + if not is_set and isinstance(value, bool): shared_pref.SetBoolean(key, value) - elif isinstance(value, basestring): - shared_pref.SetString(key, value) - elif isinstance(value, long) or isinstance(value, int): - shared_pref.SetLong(key, value) - elif isinstance(value, list): + is_set = True + try: + # Python-2 compatibility. + if not is_set and isinstance(value, basestring): + shared_pref.SetString(key, value) + is_set = True + if not is_set and isinstance(value, (long, int)): + shared_pref.SetLong(key, value) + is_set = True + except NameError: + if not is_set and isinstance(value, str): + shared_pref.SetString(key, value) + is_set = True + if not is_set and isinstance(value, int): + shared_pref.SetLong(key, value) + is_set = True + if not is_set and isinstance(value, list): shared_pref.SetStringSet(key, value) - else: + is_set = True + if not is_set: raise ValueError("Given invalid value type %s for key %s" % ( str(type(value)), key)) shared_pref.Commit() diff --git a/build/android/pylib/utils/simpleperf.py b/build/android/pylib/utils/simpleperf.py index b3ba00e6c226..f096093c7f1e 100644 --- a/build/android/pylib/utils/simpleperf.py +++ b/build/android/pylib/utils/simpleperf.py @@ -1,8 +1,9 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
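[Note] The shared_preference_utils hunk above keeps one dispatch working on both Python 2 (basestring/long) and Python 3 by probing for NameError. Once Python 2 support is dropped, the same logic collapses to the sketch below; note that bool must be tested before int, because isinstance(True, int) is true. shared_pref is assumed to be the devil SharedPrefs-style object used above:

    def apply_value(shared_pref, key, value):
        # Order matters: bool is a subclass of int.
        if isinstance(value, bool):
            shared_pref.SetBoolean(key, value)
        elif isinstance(value, str):
            shared_pref.SetString(key, value)
        elif isinstance(value, int):
            shared_pref.SetLong(key, value)
        elif isinstance(value, list):
            shared_pref.SetStringSet(key, value)
        else:
            raise ValueError('Given invalid value type %s for key %s' %
                             (str(type(value)), key))
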
import contextlib +import logging import os import shutil import subprocess @@ -10,7 +11,7 @@ import tempfile from devil import devil_env -from devil.android import device_signal +from devil.android import device_signal, device_errors from devil.android.sdk import version_codes from pylib import constants @@ -108,6 +109,7 @@ def _ThreadType(thread_name): return 'main' if thread_name.startswith('RenderThread'): return 'render' + raise ValueError('got no matching thread_name') def _GetSpecifiedTID(device, pid, thread_specifier): @@ -156,8 +158,8 @@ def InstallSimpleperf(device, package_name): @contextlib.contextmanager def RunSimpleperf(device, device_simpleperf_path, package_name, - process_specifier, thread_specifier, profiler_args, - host_out_path): + process_specifier, thread_specifier, events, + profiler_args, host_out_path): pid = _GetSpecifiedPID(device, package_name, process_specifier) tid = _GetSpecifiedTID(device, pid, thread_specifier) if pid is None and tid is None: @@ -167,16 +169,34 @@ def RunSimpleperf(device, device_simpleperf_path, package_name, profiler_args = list(profiler_args) if profiler_args and profiler_args[0] == 'record': profiler_args.pop(0) + profiler_args.extend(('-e', events)) if '--call-graph' not in profiler_args and '-g' not in profiler_args: profiler_args.append('-g') if '-f' not in profiler_args: profiler_args.extend(('-f', '1000')) + device_out_path = '/data/local/tmp/perf.data' + should_remove_device_out_path = True if '-o' in profiler_args: device_out_path = profiler_args[profiler_args.index('-o') + 1] + should_remove_device_out_path = False else: profiler_args.extend(('-o', device_out_path)) + # Remove the default output to avoid confusion if simpleperf opts not + # to update the file. + file_exists = True + try: + device.adb.Shell('readlink -e ' + device_out_path) + except device_errors.AdbCommandFailedError: + file_exists = False + if file_exists: + logging.warning('%s output file already exists on device', device_out_path) + if not should_remove_device_out_path: + raise RuntimeError('Specified output file \'{}\' already exists, not ' + 'continuing'.format(device_out_path)) + device.adb.Shell('rm -f ' + device_out_path) + if tid: profiler_args.extend(('-t', str(tid))) else: @@ -195,7 +215,18 @@ def RunSimpleperf(device, device_simpleperf_path, package_name, quiet=True) if completed: adb_shell_simpleperf_process.wait() - device.PullFile(device_out_path, host_out_path) + ret = adb_shell_simpleperf_process.returncode + if ret == 0: + # Successfully gathered a profile + device.PullFile(device_out_path, host_out_path) + else: + logging.warning( + 'simpleperf exited unusually, expected exit 0, got %d', ret + ) + stdout, stderr = adb_shell_simpleperf_process.communicate() + logging.info('stdout: \'%s\', stderr: \'%s\'', stdout, stderr) + raise RuntimeError('simpleperf exited with unexpected code {} ' + '(run with -vv for full stdout/stderr)'.format(ret)) def ConvertSimpleperfToPprof(simpleperf_out_path, build_directory, @@ -216,8 +247,10 @@ def ConvertSimpleperfToPprof(simpleperf_out_path, build_directory, report_path = os.path.join(script_dir, 'report.py') report_cmd = [sys.executable, report_path, '-i', simpleperf_out_path] device_lib_path = None - for line in subprocess.check_output( - report_cmd, stderr=subprocess.STDOUT).splitlines(): + output = subprocess.check_output(report_cmd, stderr=subprocess.STDOUT) + if isinstance(output, bytes): + output = output.decode() + for line in output.splitlines(): fields = line.split() if len(fields) < 5: continue diff 
--git a/build/android/pylib/utils/test_filter.py b/build/android/pylib/utils/test_filter.py index 6db62438d424..c532f324e175 100644 --- a/build/android/pylib/utils/test_filter.py +++ b/build/android/pylib/utils/test_filter.py @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,9 +9,6 @@ _CMDLINE_NAME_SEGMENT_RE = re.compile( r' with(?:out)? \{[^\}]*\}') -class ConflictingPositiveFiltersException(Exception): - """Raised when both filter file and filter argument have positive filters.""" - def ParseFilterFile(input_lines): """Converts test filter file contents to positive and negative pattern lists. @@ -20,7 +17,7 @@ def ParseFilterFile(input_lines): syntax that |input_lines| are expected to follow. See - https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md#running-a-subset-of-the-tests + https://github.com/google/googletest/blob/main/docs/advanced.md#running-a-subset-of-the-tests for description of the syntax that --gtest_filter argument should follow. Args: @@ -50,20 +47,26 @@ def AddFilterOptions(parser): '--gtest-filter-file', # New argument. '--test-launcher-filter-file', - dest='test_filter_file', + action='append', + dest='test_filter_files', help='Path to file that contains googletest-style filter strings. ' 'See also //testing/buildbot/filters/README.md.') filter_group = parser.add_mutually_exclusive_group() - filter_group.add_argument( - '-f', '--test-filter', '--gtest_filter', '--gtest-filter', - dest='test_filter', - help='googletest-style filter string.', - default=os.environ.get('GTEST_FILTER')) + filter_group.add_argument('-f', + '--test-filter', + '--gtest_filter', + '--gtest-filter', + dest='test_filters', + action='append', + help='googletest-style filter string.', + default=os.environ.get('GTEST_FILTER')) filter_group.add_argument( '--isolated-script-test-filter', + action='append', + dest='isolated_script_test_filters', help='isolated script filter string. ' - 'Like gtest filter strings, but with :: separators instead of :') + 'Like gtest filter strings, but with :: separators instead of :') def AppendPatternsToFilter(test_filter, positive_patterns=None, @@ -107,35 +110,36 @@ def HasPositivePatterns(test_filter): return bool(len(test_filter) > 0 and test_filter[0] != '-') -def InitializeFilterFromArgs(args): +def InitializeFiltersFromArgs(args): """Returns a filter string from the command-line option values. Args: args: an argparse.Namespace instance resulting from a using parser to which the filter options above were added. - - Raises: - ConflictingPositiveFiltersException if both filter file and command line - specify positive filters. 
""" - test_filter = '' - if args.isolated_script_test_filter: - args.test_filter = args.isolated_script_test_filter.replace('::', ':') - if args.test_filter: - test_filter = _CMDLINE_NAME_SEGMENT_RE.sub( - '', args.test_filter.replace('#', '.')) - - if args.test_filter_file: - for test_filter_file in args.test_filter_file.split(';'): + test_filters = [] + if args.isolated_script_test_filters: + args.test_filters = [ + isolated_script_test_filter.replace('::', ':') + for isolated_script_test_filter in args.isolated_script_test_filters + ] + if args.test_filters: + for filt in args.test_filters: + test_filters.append( + _CMDLINE_NAME_SEGMENT_RE.sub('', filt.replace('#', '.'))) + + if not args.test_filter_files: + return test_filters + + # At this point it's potentially several files, in a list and ; separated + for test_filter_files in args.test_filter_files: + # At this point it's potentially several files, ; separated + for test_filter_file in test_filter_files.split(';'): + # At this point it's individual files with open(test_filter_file, 'r') as f: - positive_file_patterns, negative_file_patterns = ParseFilterFile(f) - if positive_file_patterns and HasPositivePatterns(test_filter): - raise ConflictingPositiveFiltersException( - 'Cannot specify positive pattern in both filter file and ' + - 'filter command line argument') - test_filter = AppendPatternsToFilter( - test_filter, - positive_patterns=positive_file_patterns, - negative_patterns=negative_file_patterns) - - return test_filter + positive_patterns, negative_patterns = ParseFilterFile(f) + filter_string = AppendPatternsToFilter('', positive_patterns, + negative_patterns) + test_filters.append(filter_string) + + return test_filters diff --git a/build/android/pylib/utils/test_filter_test.py b/build/android/pylib/utils/test_filter_test.py index 1ae5a7ebe0b8..fa0718257b06 100755 --- a/build/android/pylib/utils/test_filter_test.py +++ b/build/android/pylib/utils/test_filter_test.py @@ -1,9 +1,10 @@ -#!/usr/bin/env vpython -# Copyright 2018 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
import argparse +import os import sys import tempfile import unittest @@ -22,7 +23,7 @@ def testParseFilterFile_commentsAndBlankLines(self): ] actual = test_filter.ParseFilterFile(input_lines) expected = ['positive1', 'positive2', 'positive3'], [] - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) def testParseFilterFile_onlyPositive(self): input_lines = [ @@ -31,7 +32,7 @@ def testParseFilterFile_onlyPositive(self): ] actual = test_filter.ParseFilterFile(input_lines) expected = ['positive1', 'positive2'], [] - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) def testParseFilterFile_onlyNegative(self): input_lines = [ @@ -40,7 +41,7 @@ def testParseFilterFile_onlyNegative(self): ] actual = test_filter.ParseFilterFile(input_lines) expected = [], ['negative1', 'negative2'] - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) def testParseFilterFile_positiveAndNegative(self): input_lines = [ @@ -51,7 +52,7 @@ def testParseFilterFile_positiveAndNegative(self): ] actual = test_filter.ParseFilterFile(input_lines) expected = ['positive1', 'positive2'], ['negative1', 'negative2'] - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) class InitializeFilterFromArgsTest(unittest.TestCase): @@ -62,9 +63,9 @@ def testInitializeBasicFilter(self): args = parser.parse_args([ '--test-filter', 'FooTest.testFoo:BarTest.testBar']) - expected = 'FooTest.testFoo:BarTest.testBar' - actual = test_filter.InitializeFilterFromArgs(args) - self.assertEquals(actual, expected) + expected = ['FooTest.testFoo:BarTest.testBar'] + actual = test_filter.InitializeFiltersFromArgs(args) + self.assertEqual(actual, expected) def testInitializeJavaStyleFilter(self): parser = argparse.ArgumentParser() @@ -72,9 +73,9 @@ def testInitializeJavaStyleFilter(self): args = parser.parse_args([ '--test-filter', 'FooTest#testFoo:BarTest#testBar']) - expected = 'FooTest.testFoo:BarTest.testBar' - actual = test_filter.InitializeFilterFromArgs(args) - self.assertEquals(actual, expected) + expected = ['FooTest.testFoo:BarTest.testBar'] + actual = test_filter.InitializeFiltersFromArgs(args) + self.assertEqual(actual, expected) def testInitializeBasicIsolatedScript(self): parser = argparse.ArgumentParser() @@ -82,28 +83,32 @@ def testInitializeBasicIsolatedScript(self): args = parser.parse_args([ '--isolated-script-test-filter', 'FooTest.testFoo::BarTest.testBar']) - expected = 'FooTest.testFoo:BarTest.testBar' - actual = test_filter.InitializeFilterFromArgs(args) - self.assertEquals(actual, expected) + expected = ['FooTest.testFoo:BarTest.testBar'] + actual = test_filter.InitializeFiltersFromArgs(args) + self.assertEqual(actual, expected) + @unittest.skipIf(os.name == "nt", "Opening NamedTemporaryFile by name " + "doesn't work in Windows.") def testFilterArgWithPositiveFilterInFilterFile(self): parser = argparse.ArgumentParser() test_filter.AddFilterOptions(parser) - with tempfile.NamedTemporaryFile() as tmp_file: + with tempfile.NamedTemporaryFile(mode='w') as tmp_file: tmp_file.write('positive1\npositive2\n-negative2\n-negative3\n') tmp_file.seek(0) args = parser.parse_args([ '--test-filter=-negative1', '--test-launcher-filter-file', tmp_file.name]) - expected = 'positive1:positive2-negative1:negative2:negative3' - actual = test_filter.InitializeFilterFromArgs(args) - self.assertEquals(actual, expected) + expected = ['-negative1', 'positive1:positive2-negative2:negative3'] + actual = test_filter.InitializeFiltersFromArgs(args) + self.assertEqual(actual, 
expected) + @unittest.skipIf(os.name == "nt", "Opening NamedTemporaryFile by name " + "doesn't work in Windows.") def testFilterFileWithPositiveFilterInFilterArg(self): parser = argparse.ArgumentParser() test_filter.AddFilterOptions(parser) - with tempfile.NamedTemporaryFile() as tmp_file: + with tempfile.NamedTemporaryFile(mode='w') as tmp_file: tmp_file.write('-negative2\n-negative3\n') tmp_file.seek(0) args = parser.parse_args([ @@ -111,89 +116,103 @@ def testFilterFileWithPositiveFilterInFilterArg(self): 'positive1:positive2-negative1', '--test-launcher-filter-file', tmp_file.name]) - expected = 'positive1:positive2-negative1:negative2:negative3' - actual = test_filter.InitializeFilterFromArgs(args) - self.assertEquals(actual, expected) + expected = ['positive1:positive2-negative1', '-negative2:negative3'] + actual = test_filter.InitializeFiltersFromArgs(args) + self.assertEqual(actual, expected) + @unittest.skipIf(os.name == "nt", "Opening NamedTemporaryFile by name " + "doesn't work in Windows.") def testPositiveFilterInBothFileAndArg(self): parser = argparse.ArgumentParser() test_filter.AddFilterOptions(parser) - with tempfile.NamedTemporaryFile() as tmp_file: - tmp_file.write('positive1\n') + with tempfile.NamedTemporaryFile(mode='w') as tmp_file: + tmp_file.write('positive2-negative2\n') tmp_file.seek(0) args = parser.parse_args([ - '--test-filter', - 'positive2', - '--test-launcher-filter-file', - tmp_file.name]) - with self.assertRaises(test_filter.ConflictingPositiveFiltersException): - test_filter.InitializeFilterFromArgs(args) + '--test-filter', 'positive1-negative1', '--test-launcher-filter-file', + tmp_file.name + ]) + expected = ['positive1-negative1', 'positive2-negative2'] + actual = test_filter.InitializeFiltersFromArgs(args) + self.assertEqual(actual, expected) + @unittest.skipIf(os.name == "nt", "Opening NamedTemporaryFile by name " + "doesn't work in Windows.") def testFilterArgWithFilterFileAllNegative(self): parser = argparse.ArgumentParser() test_filter.AddFilterOptions(parser) - with tempfile.NamedTemporaryFile() as tmp_file: + with tempfile.NamedTemporaryFile(mode='w') as tmp_file: tmp_file.write('-negative3\n-negative4\n') tmp_file.seek(0) args = parser.parse_args([ '--test-filter=-negative1:negative2', '--test-launcher-filter-file', tmp_file.name]) - expected = '-negative1:negative2:negative3:negative4' - actual = test_filter.InitializeFilterFromArgs(args) - self.assertEquals(actual, expected) + expected = ['-negative1:negative2', '-negative3:negative4'] + actual = test_filter.InitializeFiltersFromArgs(args) + self.assertEqual(actual, expected) class AppendPatternsToFilter(unittest.TestCase): def testAllEmpty(self): expected = '' actual = test_filter.AppendPatternsToFilter('', [], []) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testAppendOnlyPositiveToEmptyFilter(self): expected = 'positive' actual = test_filter.AppendPatternsToFilter('', ['positive']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testAppendOnlyNegativeToEmptyFilter(self): expected = '-negative' actual = test_filter.AppendPatternsToFilter('', negative_patterns=['negative']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testAppendToEmptyFilter(self): expected = 'positive-negative' actual = test_filter.AppendPatternsToFilter('', ['positive'], ['negative']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testAppendToPositiveOnlyFilter(self): expected = 
'positive1:positive2-negative' actual = test_filter.AppendPatternsToFilter('positive1', ['positive2'], ['negative']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testAppendToNegativeOnlyFilter(self): expected = 'positive-negative1:negative2' actual = test_filter.AppendPatternsToFilter('-negative1', ['positive'], ['negative2']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testAppendPositiveToFilter(self): expected = 'positive1:positive2-negative1' actual = test_filter.AppendPatternsToFilter('positive1-negative1', ['positive2']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testAppendNegativeToFilter(self): expected = 'positive1-negative1:negative2' actual = test_filter.AppendPatternsToFilter('positive1-negative1', negative_patterns=['negative2']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testAppendBothToFilter(self): expected = 'positive1:positive2-negative1:negative2' actual = test_filter.AppendPatternsToFilter('positive1-negative1', positive_patterns=['positive2'], negative_patterns=['negative2']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testAppendMultipleToFilter(self): expected = 'positive1:positive2:positive3-negative1:negative2:negative3' actual = test_filter.AppendPatternsToFilter('positive1-negative1', ['positive2', 'positive3'], ['negative2', 'negative3']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testRepeatedAppendToFilter(self): expected = 'positive1:positive2:positive3-negative1:negative2:negative3' filter_string = test_filter.AppendPatternsToFilter('positive1-negative1', @@ -201,32 +220,36 @@ def testRepeatedAppendToFilter(self): ['negative2']) actual = test_filter.AppendPatternsToFilter(filter_string, ['positive3'], ['negative3']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testAppendHashSeparatedPatternsToFilter(self): expected = 'positive.test1:positive.test2-negative.test1:negative.test2' actual = test_filter.AppendPatternsToFilter('positive#test1-negative#test1', ['positive#test2'], ['negative#test2']) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) class HasPositivePatterns(unittest.TestCase): def testEmpty(self): expected = False actual = test_filter.HasPositivePatterns('') - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testHasOnlyPositive(self): expected = True actual = test_filter.HasPositivePatterns('positive') - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testHasOnlyNegative(self): expected = False actual = test_filter.HasPositivePatterns('-negative') - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) + def testHasBoth(self): expected = True actual = test_filter.HasPositivePatterns('positive-negative') - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) if __name__ == '__main__': diff --git a/build/android/pylib/utils/time_profile.py b/build/android/pylib/utils/time_profile.py index 094799c4f2a1..54b96c290a1d 100644 --- a/build/android/pylib/utils/time_profile.py +++ b/build/android/pylib/utils/time_profile.py @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
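[Note] The expected strings in these tests follow googletest's filter grammar: positive patterns first, then a single '-', then negative patterns, all ':'-joined. A worked example of the string AppendPatternsToFilter assembles (test names are made up):

    positive = ['FooTest.testFoo', 'BarTest.testBar']
    negative = ['SlowTest.*']
    filter_string = ':'.join(positive) + '-' + ':'.join(negative)
    assert filter_string == 'FooTest.testFoo:BarTest.testBar-SlowTest.*'
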
@@ -6,7 +6,7 @@ import time -class TimeProfile(object): +class TimeProfile: """Class for simple profiling of action, with logging of cost.""" def __init__(self, description='operation'): diff --git a/build/android/pylib/utils/xvfb.py b/build/android/pylib/utils/xvfb.py index cb9d50e8fd97..6ab24afc75d1 100644 --- a/build/android/pylib/utils/xvfb.py +++ b/build/android/pylib/utils/xvfb.py @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -16,7 +16,7 @@ def _IsLinux(): return sys.platform.startswith('linux') -class Xvfb(object): +class Xvfb: """Class to start and stop Xvfb if relevant. Nop if not Linux.""" def __init__(self): diff --git a/build/android/pylib/valgrind_tools.py b/build/android/pylib/valgrind_tools.py index aea70ecfceb4..bab7fcaa868e 100644 --- a/build/android/pylib/valgrind_tools.py +++ b/build/android/pylib/valgrind_tools.py @@ -1,10 +1,11 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # pylint: disable=R0201 -from __future__ import print_function + + from __future__ import absolute_import import logging @@ -35,7 +36,7 @@ class AddressSanitizerTool(base_tool.BaseTool): EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1' def __init__(self, device): - super(AddressSanitizerTool, self).__init__() + super().__init__() self._device = device @classmethod @@ -92,10 +93,10 @@ def CreateTool(tool_name, device): ctor = TOOL_REGISTRY.get(tool_name) if ctor: return ctor(device) - else: - print('Unknown tool %s, available tools: %s' % (tool_name, ', '.join( - sorted(TOOL_REGISTRY.keys())))) - sys.exit(1) + print('Unknown tool %s, available tools: %s' % + (tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))) + sys.exit(1) + def PushFilesForTool(tool_name, device): """Pushes the files required for |tool_name| to |device|. diff --git a/build/android/resource_sizes.gni b/build/android/resource_sizes.gni index 2c91749c5ee1..c599bbbf71f1 100644 --- a/build/android/resource_sizes.gni +++ b/build/android/resource_sizes.gni @@ -1,4 +1,4 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -32,10 +32,7 @@ template("android_resource_sizes_test") { "@WrappedPath(.)", ] - data = [ - "//.vpython", - "//.vpython3", - ] + data = [] if (defined(invoker.trichrome_chrome_path)) { data += [ invoker.trichrome_chrome_path, @@ -78,18 +75,15 @@ template("android_resource_sizes_test") { # relative to $root_build_dir. The resulting JSON file is written to # "$root_build_dir/config/${invoker.name}_size_config.json". # -# Variables: -# name: The name of the path to the generated size config JSON file. -# mapping_files: List of mapping files. -# to_resource_sizes_py: Scope containing data to pass to resource_sizes.py, -# processed by generate_commit_size_analysis.py. -# supersize_input_file: Main input for SuperSize. +# Refer to tools/binary_size/generate_commit_size_analysis.py for JSON schema. 
+# template("android_size_bot_config") { _full_target_name = get_label_info(target_name, "label_no_toolchain") _out_json = { _HEADER = "Written by build target '${_full_target_name}'" forward_variables_from(invoker, [ + "archive_files", "mapping_files", "to_resource_sizes_py", "supersize_input_file", diff --git a/build/android/resource_sizes.py b/build/android/resource_sizes.py index c59297072a84..05ee86ce265b 100755 --- a/build/android/resource_sizes.py +++ b/build/android/resource_sizes.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright (c) 2011 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2011 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,7 +8,6 @@ More information at //docs/speed/binary_size/metrics.md. """ -from __future__ import print_function import argparse import collections @@ -33,11 +32,11 @@ from pylib.constants import host_paths _AAPT_PATH = lazy.WeakConstant(lambda: build_tools.GetPath('aapt')) -_BUILD_UTILS_PATH = os.path.join( - host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gyp') - -with host_paths.SysPath(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build')): - import gn_helpers # pylint: disable=import-error +_ANDROID_UTILS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'build', + 'android', 'gyp') +_BUILD_UTILS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'build', 'util') +_READOBJ_PATH = os.path.join(constants.ANDROID_NDK_ROOT, 'toolchains', 'llvm', + 'prebuilt', 'linux-x86_64', 'bin', 'llvm-readobj') with host_paths.SysPath(host_paths.BUILD_COMMON_PATH): import perf_tests_results_helper # pylint: disable=import-error @@ -45,12 +44,12 @@ with host_paths.SysPath(host_paths.TRACING_PATH): from tracing.value import convert_chart_json # pylint: disable=import-error -with host_paths.SysPath(_BUILD_UTILS_PATH, 0): +with host_paths.SysPath(_ANDROID_UTILS_PATH, 0): from util import build_utils # pylint: disable=import-error - from util import zipalign # pylint: disable=import-error - -zipalign.ApplyZipFileZipAlignFix() +with host_paths.SysPath(_BUILD_UTILS_PATH, 0): + from lib.results import result_sink # pylint: disable=import-error + from lib.results import result_types # pylint: disable=import-error # Captures an entire config from aapt output. 
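[Note] The import shuffle above also swaps GNU readelf for the NDK's llvm-readobj; as _RunReadobj below shows, --elf-output-style=GNU keeps the output close enough to 'readelf -S --wide' that the existing section-size regexes still match. A minimal sketch (the binary path is abbreviated and illustrative):

    import subprocess

    READOBJ = 'toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-readobj'

    def dump_section_headers(so_path):
        # GNU output style mimics readelf; -S lists section headers.
        return subprocess.check_output(
            [READOBJ, '--elf-output-style=GNU', '-S', '--wide',
             so_path]).decode()
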
_AAPT_CONFIG_PATTERN = r'config %s:(.*?)config [a-zA-Z-]+:' @@ -89,7 +88,7 @@ } -class _AccumulatingReporter(object): +class _AccumulatingReporter: def __init__(self): self._combined_metrics = collections.defaultdict(int) @@ -98,26 +97,25 @@ def __call__(self, graph_title, trace_title, value, units): def DumpReports(self, report_func): for (graph_title, trace_title, - units), value in sorted(self._combined_metrics.iteritems()): + units), value in sorted(self._combined_metrics.items()): report_func(graph_title, trace_title, value, units) class _ChartJsonReporter(_AccumulatingReporter): def __init__(self, chartjson): - super(_ChartJsonReporter, self).__init__() + super().__init__() self._chartjson = chartjson self.trace_title_prefix = '' def __call__(self, graph_title, trace_title, value, units): - super(_ChartJsonReporter, self).__call__(graph_title, trace_title, value, - units) + super().__call__(graph_title, trace_title, value, units) perf_tests_results_helper.ReportPerfResult( self._chartjson, graph_title, self.trace_title_prefix + trace_title, value, units) def SynthesizeTotals(self, unique_method_count): - for tup, value in sorted(self._combined_metrics.iteritems()): + for tup, value in sorted(self._combined_metrics.items()): graph_title, trace_title, units = tup if trace_title == 'unique methods': value = unique_method_count @@ -167,35 +165,34 @@ def _MeasureApkSignatureBlock(zip_file): return start_of_central_directory - end_of_last_file -def _RunReadelf(so_path, options, tool_prefix=''): - return cmd_helper.GetCmdOutput( - [tool_prefix + 'readelf'] + options + [so_path]) +def _RunReadobj(so_path, options): + return cmd_helper.GetCmdOutput([_READOBJ_PATH, '--elf-output-style=GNU'] + + options + [so_path]) -def _ExtractLibSectionSizesFromApk(apk_path, lib_path, tool_prefix): +def _ExtractLibSectionSizesFromApk(apk_path, lib_path): with Unzip(apk_path, filename=lib_path) as extracted_lib_path: grouped_section_sizes = collections.defaultdict(int) no_bits_section_sizes, section_sizes = _CreateSectionNameSizeMap( - extracted_lib_path, tool_prefix) - for group_name, section_names in _READELF_SIZES_METRICS.iteritems(): + extracted_lib_path) + for group_name, section_names in _READELF_SIZES_METRICS.items(): for section_name in section_names: if section_name in section_sizes: grouped_section_sizes[group_name] += section_sizes.pop(section_name) # Consider all NOBITS sections as .bss. - grouped_section_sizes['bss'] = sum( - v for v in no_bits_section_sizes.itervalues()) + grouped_section_sizes['bss'] = sum(no_bits_section_sizes.values()) # Group any unknown section headers into the "other" group. 
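[Note] parse_attr above became namespace-aware because aapt does not always print the android:/dist: prefix. A self-contained check against a hypothetical aapt xmltree line, modeled on the comments in the hunk:

    import re

    output = 'android:minSdkVersion(0x0101020c)=(type 0x10)0x18'

    def parse_attr(namespace, name):
        # Both the namespace prefix and the (0x...) resource id are optional.
        m = re.search(
            f'(?:{namespace}:)?{name}' + r'(?:\(.*?\))?=\(type .*?\)(\w+)',
            output)
        return m and int(m.group(1), 16)

    assert parse_attr('android', 'minSdkVersion') == 24  # 0x18
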
- for section_header, section_size in section_sizes.iteritems(): + for section_header, section_size in section_sizes.items(): sys.stderr.write('Unknown elf section header: %s\n' % section_header) grouped_section_sizes['other'] += section_size return grouped_section_sizes -def _CreateSectionNameSizeMap(so_path, tool_prefix): - stdout = _RunReadelf(so_path, ['-S', '--wide'], tool_prefix) +def _CreateSectionNameSizeMap(so_path): + stdout = _RunReadobj(so_path, ['-S', '--wide']) section_sizes = {} no_bits_section_sizes = {} # Matches [ 2] .hash HASH 00000000006681f0 0001f0 003154 04 A 3 0 8 @@ -212,18 +209,19 @@ def _ParseManifestAttributes(apk_path): output = cmd_helper.GetCmdOutput([ _AAPT_PATH.read(), 'd', 'xmltree', apk_path, 'AndroidManifest.xml']) - def parse_attr(name): + def parse_attr(namespace, name): # android:extractNativeLibs(0x010104ea)=(type 0x12)0x0 # android:extractNativeLibs(0x010104ea)=(type 0x12)0xffffffff # dist:onDemand=(type 0x12)0xffffffff - m = re.search(name + r'(?:\(.*?\))?=\(type .*?\)(\w+)', output) + m = re.search( + f'(?:{namespace}:)?{name}' + r'(?:\(.*?\))?=\(type .*?\)(\w+)', output) return m and int(m.group(1), 16) - skip_extract_lib = bool(parse_attr('android:extractNativeLibs')) - sdk_version = parse_attr('android:minSdkVersion') - is_feature_split = parse_attr('android:isFeatureSplit') + skip_extract_lib = bool(parse_attr('android', 'extractNativeLibs')) + sdk_version = parse_attr('android', 'minSdkVersion') + is_feature_split = parse_attr('android', 'isFeatureSplit') # Can use <dist:on-demand>, or <dist:onDemand>. - on_demand = parse_attr('dist:onDemand') or 'dist:on-demand' in output + on_demand = parse_attr('dist', 'onDemand') or 'on-demand' in output on_demand = bool(on_demand and is_feature_split) return sdk_version, skip_extract_lib, on_demand @@ -265,7 +263,7 @@ def _NormalizeResourcesArsc(apk_path, num_arsc_files, num_translations, config_count = num_translations - 2 size = 0 - for res_id, string_val in en_strings.iteritems(): + for res_id, string_val in en_strings.items(): if string_val == fr_strings[res_id]: string_size = len(string_val) # 7 bytes is the per-entry overhead (not specific to any string). See @@ -294,7 +292,7 @@ def _RunAaptDumpResources(apk_path): return output -class _FileGroup(object): +class _FileGroup: """Represents a category that apk files can fall into.""" def __init__(self, name): @@ -342,7 +340,6 @@ def _AnalyzeInternal(apk_path, report_func, dex_stats_collector, out_dir, - tool_prefix, apks_path=None, split_name=None): """Analyse APK to determine size contributions of different file classes. @@ -400,8 +397,13 @@ def has_no_extension(filename): is_webview = 'WebView' in orig_filename is_monochrome = 'Monochrome' in orig_filename is_library = 'Library' in orig_filename + is_trichrome = 'TrichromeChrome' in orig_filename + # WebView is always a shared APK since other apps load it. + # Library is always shared since it's used by chrome and webview. + # Chrome is always shared since renderers can't access dex otherwise + # (see DexFixer). is_shared_apk = sdk_version >= 24 and (is_monochrome or is_webview - or is_library) + or is_library or is_trichrome) # Dex decompression overhead varies by Android version. 
if sdk_version < 21: # JellyBean & KitKat @@ -426,8 +428,14 @@ def has_no_extension(filename): should_extract_lib = not skip_extract_lib and basename.startswith('lib') native_code.AddZipInfo( member, extracted_multiplier=int(should_extract_lib)) - elif filename.endswith('.dex'): - java_code.AddZipInfo(member, extracted_multiplier=dex_multiplier) + elif filename.startswith('classes') and filename.endswith('.dex'): + # Android P+, uncompressed dex does not need to be extracted. + compressed = member.compress_type != zipfile.ZIP_STORED + multiplier = dex_multiplier + if not compressed and sdk_version >= 28: + multiplier -= 1 + + java_code.AddZipInfo(member, extracted_multiplier=multiplier) elif re.search(_RE_NON_LANGUAGE_PAK, filename): native_resources_no_translations.AddZipInfo(member) elif filename.endswith('.pak') or filename.endswith('.lpak'): @@ -492,9 +500,15 @@ def has_no_extension(filename): report_func('Uncompressed', group.name + ' size', uncompressed_size, 'bytes') - if group is java_code and is_shared_apk: + if group is java_code: # Updates are compiled using quicken, but system image uses speed-profile. - extracted_size = int(uncompressed_size * speed_profile_dex_multiplier) + multiplier = speed_profile_dex_multiplier + + # Android P+, uncompressed dex does not need to be extracted. + compressed = uncompressed_size != actual_size + if not compressed and sdk_version >= 28: + multiplier -= 1 + extracted_size = int(uncompressed_size * multiplier) total_install_size_android_go += extracted_size report_func('InstallBreakdownGo', group.name + ' size', actual_size + extracted_size, 'bytes') @@ -512,9 +526,8 @@ def has_no_extension(filename): report_func('InstallSize', 'APK size', total_apk_size, 'bytes') report_func('InstallSize', 'Estimated installed size', int(total_install_size), 'bytes') - if is_shared_apk: - report_func('InstallSize', 'Estimated installed size (Android Go)', - int(total_install_size_android_go), 'bytes') + report_func('InstallSize', 'Estimated installed size (Android Go)', + int(total_install_size_android_go), 'bytes') transfer_size = _CalculateCompressedSize(apk_path) report_func('TransferSize', 'Transfer size (deflate)', transfer_size, 'bytes') @@ -529,10 +542,9 @@ def has_no_extension(filename): main_lib_info = native_code.FindLargest() native_code_unaligned_size = 0 for lib_info in native_code.AllEntries(): - section_sizes = _ExtractLibSectionSizesFromApk(apk_path, lib_info.filename, - tool_prefix) - native_code_unaligned_size += sum( - v for k, v in section_sizes.iteritems() if k != 'bss') + section_sizes = _ExtractLibSectionSizesFromApk(apk_path, lib_info.filename) + native_code_unaligned_size += sum(v for k, v in section_sizes.items() + if k != 'bss') # Size of main .so vs remaining. if lib_info == main_lib_info: main_lib_size = lib_info.file_size @@ -540,7 +552,7 @@ def has_no_extension(filename): secondary_size = native_code.ComputeUncompressedSize() - main_lib_size report_func('Specifics', 'other lib size', secondary_size, 'bytes') - for metric_name, size in section_sizes.iteritems(): + for metric_name, size in section_sizes.items(): report_func('MainLibInfo', metric_name, size, 'bytes') # Main metric that we want to monitor for jumps. 
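The two multiplier tweaks in the hunks above interact: a classes.dex stored uncompressed on Android P+ costs its APK bytes but no extracted copy. A minimal sketch of that arithmetic (the helper name and sizes are hypothetical, not part of the patch):

    # Sketch of the InstallBreakdownGo estimate above: on Android P+
    # (sdk_version >= 28) an uncompressed classes.dex runs in place, so one
    # extraction copy is dropped from the multiplier.
    def go_install_estimate(uncompressed_size, actual_size, sdk_version,
                            speed_profile_dex_multiplier):
      multiplier = speed_profile_dex_multiplier
      compressed = uncompressed_size != actual_size
      if not compressed and sdk_version >= 28:
        multiplier -= 1  # No extracted copy for uncompressed dex on P+.
      return actual_size + int(uncompressed_size * multiplier)

    # A hypothetical 10 MiB dex stored uncompressed contributes only its
    # 10 MiB APK footprint under a speed-profile multiplier of 1.
    assert go_install_estimate(10 << 20, 10 << 20, 28, 1) == 10 << 20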
@@ -634,7 +646,7 @@ def _CalculateCompressedSize(file_path): compressor = zlib.compressobj() total_size = 0 with open(file_path, 'rb') as f: - for chunk in iter(lambda: f.read(CHUNK_SIZE), ''): + for chunk in iter(lambda: f.read(CHUNK_SIZE), b''): total_size += len(compressor.compress(chunk)) total_size += len(compressor.flush()) return total_size @@ -652,7 +664,7 @@ def Unzip(zip_file, filename=None): yield unzipped_files[0] -def _ConfigOutDirAndToolsPrefix(out_dir): +def _ConfigOutDir(out_dir): if out_dir: constants.SetOutputDirectory(out_dir) else: @@ -661,10 +673,8 @@ def _ConfigOutDirAndToolsPrefix(out_dir): constants.CheckOutputDirectory() out_dir = constants.GetOutDirectory() except Exception: # pylint: disable=broad-except - return out_dir, '' - build_vars = gn_helpers.ReadBuildVars(out_dir) - tool_prefix = os.path.join(out_dir, build_vars['android_tool_prefix']) - return out_dir, tool_prefix + pass + return out_dir def _IterSplits(namelist): @@ -686,16 +696,15 @@ def _ExtractToTempFile(zip_obj, subpath, temp_file): temp_file.flush() -def _AnalyzeApkOrApks(report_func, apk_path, args): +def _AnalyzeApkOrApks(report_func, apk_path, out_dir): # Create DexStatsCollector here to track unique methods across base & chrome # modules. dex_stats_collector = method_count.DexStatsCollector() - out_dir, tool_prefix = _ConfigOutDirAndToolsPrefix(args.out_dir) if apk_path.endswith('.apk'): sdk_version, _, _ = _ParseManifestAttributes(apk_path) _AnalyzeInternal(apk_path, sdk_version, report_func, dex_stats_collector, - out_dir, tool_prefix) + out_dir) elif apk_path.endswith('.apks'): with tempfile.NamedTemporaryFile(suffix='.apk') as f: with zipfile.ZipFile(apk_path) as z: @@ -726,7 +735,6 @@ def do_measure(split_name, on_demand): inner_report_func, inner_dex_stats_collector, out_dir, - tool_prefix, apks_path=apk_path, split_name=split_name) report_func('DFM_' + split_name, 'Size with hindi', size, 'bytes') @@ -772,13 +780,14 @@ def _ResourceSizes(args): for prefix, path in specs: if path: reporter.trace_title_prefix = prefix - child_dex_stats_collector = _AnalyzeApkOrApks(reporter, path, args) + child_dex_stats_collector = _AnalyzeApkOrApks(reporter, path, + args.out_dir) dex_stats_collector.MergeFrom(prefix, child_dex_stats_collector) if any(path for _, path in specs): reporter.SynthesizeTotals(dex_stats_collector.GetUniqueMethodCount()) else: - _AnalyzeApkOrApks(reporter, args.input, args) + _AnalyzeApkOrApks(reporter, args.input, args.out_dir) if chartjson: _DumpChartJson(args, chartjson) @@ -813,11 +822,12 @@ def _DumpChartJson(args, chartjson): histogram_path = os.path.join(args.output_dir, 'perf_results.json') logging.critical('Dumping histograms to %s', histogram_path) - with open(histogram_path, 'w') as json_file: + with open(histogram_path, 'wb') as json_file: json_file.write(histogram_result.stdout) def main(): + build_utils.InitLogging('RESOURCE_SIZES_DEBUG') argparser = argparse.ArgumentParser(description='Print APK size metrics.') argparser.add_argument( '--min-pak-resource-size', @@ -875,12 +885,14 @@ def main(): '--trichrome-library', help='Path to Trichrome Library .apk') args = argparser.parse_args() + args.out_dir = _ConfigOutDir(args.out_dir) devil_chromium.Initialize(output_directory=args.out_dir) # TODO(bsheedy): Remove this once uses of --chartjson have been removed. 
if args.chartjson: args.output_format = 'chartjson' + result_sink_client = result_sink.TryInitClient() isolated_script_output = {'valid': False, 'failures': []} test_name = 'resource_sizes (%s)' % os.path.basename(args.input) @@ -904,6 +916,13 @@ def main(): json.dump(isolated_script_output, output_file) with open(args.isolated_script_test_output, 'w') as output_file: json.dump(isolated_script_output, output_file) + if result_sink_client: + status = result_types.PASS + if not isolated_script_output['valid']: + status = result_types.UNKNOWN + elif isolated_script_output['failures']: + status = result_types.FAIL + result_sink_client.Post(test_name, status, None, None, None) if __name__ == '__main__': diff --git a/build/android/resource_sizes.pydeps b/build/android/resource_sizes.pydeps index d956f5bae727..86db3ff82585 100644 --- a/build/android/resource_sizes.pydeps +++ b/build/android/resource_sizes.pydeps @@ -43,12 +43,15 @@ ../../third_party/catapult/tracing/tracing/value/convert_chart_json.py ../../third_party/catapult/tracing/tracing_project.py ../gn_helpers.py +../util/lib/__init__.py ../util/lib/common/perf_result_data_type.py ../util/lib/common/perf_tests_results_helper.py +../util/lib/results/__init__.py +../util/lib/results/result_sink.py +../util/lib/results/result_types.py devil_chromium.py gyp/util/__init__.py gyp/util/build_utils.py -gyp/util/zipalign.py method_count.py pylib/__init__.py pylib/constants/__init__.py diff --git a/build/android/screenshot.py b/build/android/screenshot.py index 523d859a2154..6366e85555ae 100755 --- a/build/android/screenshot.py +++ b/build/android/screenshot.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2015 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/stacktrace/BUILD.gn b/build/android/stacktrace/BUILD.gn index ce13a15b4b64..0501a96706f1 100644 --- a/build/android/stacktrace/BUILD.gn +++ b/build/android/stacktrace/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,10 +9,7 @@ java_library("java_deobfuscate_java") { # Avoid using java_prebuilt() to ensure all uses go through the checked-in # wrapper script. - input_jars_paths = [ - "//third_party/proguard/lib/proguard603.jar", - "//third_party/proguard/lib/retrace603.jar", - ] + input_jars_paths = [ "//third_party/r8/lib/r8.jar" ] } # Use the checked-in copy of the wrapper script & .jar rather than the built @@ -20,9 +17,8 @@ java_library("java_deobfuscate_java") { group("java_deobfuscate") { data = [ "java_deobfuscate.py", - "java_deobfuscate.jar", - "//third_party/proguard/lib/proguard603.jar", - "//third_party/proguard/lib/retrace603.jar", + "java_deobfuscate_java.jar", + "//third_party/r8/lib/r8.jar", ] deps = [ "//third_party/jdk:java_data" ] } diff --git a/build/android/stacktrace/README.md b/build/android/stacktrace/README.md index 58ea94be9896..528af2278860 100644 --- a/build/android/stacktrace/README.md +++ b/build/android/stacktrace/README.md @@ -14,8 +14,8 @@ And have it actually show output without logcat terminating. 
## Update Instructions: - ninja -C out/Release java_deobfuscate - cp out/Release/lib.java/build/android/stacktrace/java_deobfuscate.jar build/android/stacktrace + ninja -C out/Release java_deobfuscate_java + cp out/Release/lib.java/build/android/stacktrace/java_deobfuscate_java.jar build/android/stacktrace # stackwalker.py diff --git a/build/android/stacktrace/crashpad_stackwalker.py b/build/android/stacktrace/crashpad_stackwalker.py index 9616a54ba654..9703b7c88da7 100755 --- a/build/android/stacktrace/crashpad_stackwalker.py +++ b/build/android/stacktrace/crashpad_stackwalker.py @@ -1,6 +1,6 @@ -#!/usr/bin/env vpython +#!/usr/bin/env vpython3 # -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java b/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java index baa931328b4a..3e27197542a0 100644 --- a/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java +++ b/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java @@ -1,18 +1,21 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. +// Copyright 2017 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.build; +import com.android.tools.r8.DiagnosticsHandler; +import com.android.tools.r8.retrace.ProguardMappingSupplier; +import com.android.tools.r8.retrace.Retrace; +import com.android.tools.r8.retrace.RetraceCommand; +import com.android.tools.r8.retrace.StackTraceSupplier; + import java.io.BufferedReader; -import java.io.File; +import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; -import java.io.LineNumberReader; -import java.io.OutputStreamWriter; -import java.io.PrintWriter; - -import proguard.retrace.ReTrace; +import java.util.Collections; +import java.util.List; /** * A wrapper around ReTrace that: @@ -40,6 +43,9 @@ public class FlushingReTrace { // Normal stack trace lines look like: // \tat org.chromium.chrome.browser.tab.Tab.handleJavaCrash(Tab.java:682) + "(?:.*?(?::|\\bat)\\s+%c\\.%m\\s*\\(\\s*%s(?:\\s*:\\s*%l\\s*)?\\))|" + // Stack trace from crbug.com/1300215 looks like: + // 0xffffffff (chromium-TrichromeChromeGoogle.aab-canary-490400033: 70) ii2.p + + "(?:.*?\\(\\s*%s(?:\\s*:\\s*%l\\s*)?\\)\\s*%c\\.%m)|" // E.g.: Caused by: java.lang.NullPointerException: Attempt to read from field 'int bLA' // on a null object reference + "(?:.*java\\.lang\\.NullPointerException.*[\"']%t\\s*%c\\.(?:%f|%m\\(%a\\))[\"'].*)|" @@ -93,24 +99,48 @@ public static void main(String[] args) { usage(); } - File mappingFile = new File(args[0]); try { - LineNumberReader reader = new LineNumberReader( - new BufferedReader(new InputStreamReader(System.in, "UTF-8"))); + ProguardMappingSupplier mappingSupplier = + ProguardMappingSupplier.builder() + .setProguardMapProducer(() -> new FileInputStream(args[0])) + .build(); + // Force eager parsing of .mapping file (~10 second operation). It otherwise would + // not happen until the first line of input is received. + // https://crbug.com/1351023 + mappingSupplier.createRetracer(new DiagnosticsHandler() {}); - // Enabling autoFlush is the main difference from ReTrace.main(). 
- boolean autoFlush = true; - PrintWriter writer = - new PrintWriter(new OutputStreamWriter(System.out, "UTF-8"), autoFlush); + // This whole command was given to us by the R8 team in b/234758957. + RetraceCommand retraceCommand = + RetraceCommand.builder() + .setMappingSupplier(mappingSupplier) + .setRetracedStackTraceConsumer( + retraced -> retraced.forEach(System.out::println)) + .setRegularExpression(LINE_PARSE_REGEX) + .setStackTrace(new StackTraceSupplier() { + final BufferedReader mReader = new BufferedReader( + new InputStreamReader(System.in, "UTF-8")); - boolean verbose = false; - new ReTrace(LINE_PARSE_REGEX, verbose, mappingFile).retrace(reader, writer); + @Override + public List<String> get() { + try { + String line = mReader.readLine(); + if (line == null) { + return null; + } + return Collections.singletonList(line); + } catch (IOException e) { + e.printStackTrace(); + return null; + } + } + }) + .build(); + Retrace.run(retraceCommand); } catch (IOException ex) { // Print a verbose stack trace. ex.printStackTrace(); System.exit(1); } - System.exit(0); } } diff --git a/build/android/stacktrace/java_deobfuscate.jar b/build/android/stacktrace/java_deobfuscate.jar deleted file mode 100644 index 36a1b706a330716cc1e0db29ccec6a3d037db108..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3113 [base85-encoded binary delta omitted] diff --git a/build/android/stacktrace/java_deobfuscate.py b/build/android/stacktrace/java_deobfuscate.py index 8c231ecfcc58..fa872d98ebf1 100755 --- a/build/android/stacktrace/java_deobfuscate.py +++ b/build/android/stacktrace/java_deobfuscate.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Wrapper script for java_deobfuscate. 
@@ -14,15 +14,11 @@ DIR_SOURCE_ROOT = os.path.normpath( os.path.join(os.path.dirname(__file__), '../../../')) - def main(): classpath = [ os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'stacktrace', - 'java_deobfuscate.jar'), - os.path.join(DIR_SOURCE_ROOT, 'third_party', 'proguard', 'lib', - 'proguard603.jar'), - os.path.join(DIR_SOURCE_ROOT, 'third_party', 'proguard', 'lib', - 'retrace603.jar'), + 'java_deobfuscate_java.jar'), + os.path.join(DIR_SOURCE_ROOT, 'third_party', 'r8', 'lib', 'r8.jar') ] java_path = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current', 'bin', 'java') @@ -32,6 +28,7 @@ def main(): 'org.chromium.build.FlushingReTrace' ] cmd.extend(sys.argv[1:]) + os.execvp(cmd[0], cmd) diff --git a/build/android/stacktrace/java_deobfuscate_java.jar b/build/android/stacktrace/java_deobfuscate_java.jar new file mode 100644 index 0000000000000000000000000000000000000000..8f31b76c932eecd715d04a223647f354f46933c7 GIT binary patch literal 7643 [base85-encoded binary delta omitted] diff --git a/build/android/test_runner.py b/build/android/test_runner.py --- a/build/android/test_runner.py +++ b/build/android/test_runner.py if args.verbose_count > 0: handler = logging_utils.ColorStreamHandler() else: handler = logging.StreamHandler(sys.stdout) - # pylint: enable=redefined-variable-type handler.setFormatter(run_tests_helper.CustomFormatter()) logging.getLogger().addHandler(handler) @@ -337,6 +360,12 @@ def AddEmulatorOptions(parser): action='store_true', default=False, help='Enable graphical window display on the emulator.') + parser.add_argument( + '--emulator-debug-tags', + help='Comma-separated list of debug tags. This can be used to enable or ' + 'disable debug messages from specific parts of the emulator, e.g. ' + 'init,snapshot. See "emulator -help-debug-tags" ' + 'for a full list of tags.') def AddGTestOptions(parser): @@ -353,10 +382,6 @@ parser.add_argument( '--app-data-file-dir', help='Host directory to which app data files will be' ' saved. Used with --app-data-file.') - parser.add_argument( - '--delete-stale-data', - dest='delete_stale_data', action='store_true', - help='Delete stale test data on the device.') parser.add_argument( '--enable-xml-result-parsing', action='store_true', help=argparse.SUPPRESS) @@ -414,6 +439,12 @@ '--coverage-dir', type=os.path.realpath, help='Directory in which to place all generated coverage files.') + parser.add_argument( + '--use-existing-test-data', + action='store_true', + help='Do not push new files to the device, instead using existing APK ' + 'and test data.
Only use when running the same test for multiple ' + 'iterations.') def AddInstrumentationTestOptions(parser): parser = parser.add_argument_group('instrumentation arguments') + parser.add_argument('--additional-apex', + action='append', + dest='additional_apexs', + default=[], + type=_RealPath, + help='Additional apex that must be installed on ' + 'the device when the tests are run') parser.add_argument( '--additional-apk', action='append', dest='additional_apks', default=[], type=_RealPath, help='Additional apk that must be installed on ' 'the device when the tests are run') + parser.add_argument('--forced-queryable-additional-apk', + action='append', + dest='forced_queryable_additional_apks', + default=[], + type=_RealPath, + help='Configures an additional-apk to be forced ' + 'to be queryable by other APKs.') + parser.add_argument('--instant-additional-apk', + action='append', + dest='instant_additional_apks', + default=[], + type=_RealPath, + help='Configures an additional-apk to be an instant APK') parser.add_argument( '-A', '--annotation', dest='annotation_str', @@ -439,6 +490,11 @@ def AddInstrumentationTestOptions(parser): parser.add_argument( '--apk-under-test', help='Path or name of the apk under test.') + parser.add_argument( + '--store-data-in-app-directory', + action='store_true', + help='Store test data in the application\'s data directory. By default ' + 'the test data is stored in the external storage folder.') parser.add_argument( '--module', action='append', @@ -462,42 +518,29 @@ def AddInstrumentationTestOptions(parser): type=os.path.realpath, help='Directory in which to place all generated ' 'Jacoco coverage files.') - parser.add_argument( - '--delete-stale-data', - action='store_true', dest='delete_stale_data', - help='Delete stale test data on the device.') parser.add_argument( '--disable-dalvik-asserts', dest='set_asserts', action='store_false', default=True, help='Removes the dalvik.vm.enableassertions property') parser.add_argument( - '--enable-java-deobfuscation', - action='store_true', - help='Deobfuscate java stack traces in test output and logcat.') + '--proguard-mapping-path', + help='.mapping file to use to deobfuscate java stack traces in test ' + 'output and logcat.') parser.add_argument( '-E', '--exclude-annotation', dest='exclude_annotation_str', help='Comma-separated list of annotations. Exclude tests with these ' 'annotations.') - def package_replacement(arg): - split_arg = arg.split(',') - if len(split_arg) != 2: - raise argparse.ArgumentError( - arg, - 'Expected two comma-separated strings for --replace-system-package, ' - 'received %d' % len(split_arg)) - PackageReplacement = collections.namedtuple('PackageReplacement', - ['package', 'replacement_apk']) - return PackageReplacement(package=split_arg[0], - replacement_apk=_RealPath(split_arg[1])) + parser.add_argument( + '--enable-breakpad-dump', + action='store_true', + help='Stores any breakpad dumps till the end of the test.') parser.add_argument( '--replace-system-package', - type=package_replacement, default=None, - help='Specifies a system package to replace with a given APK for the ' - 'duration of the test. Given as a comma-separated pair of strings, ' - 'the first element being the package and the second the path to the ' - 'replacement APK. Only supports replacing one package. 
Example: ' - '--replace-system-package com.example.app,path/to/some.apk') + type=_RealPath, + default=None, + help='Use this apk to temporarily replace a system package with the same ' + 'package name.') parser.add_argument( '--remove-system-package', default=[], @@ -507,7 +550,11 @@ def package_replacement(arg): 'on the system. WARNING: THIS WILL PERMANENTLY REMOVE THE SYSTEM APP. ' 'Unlike --replace-system-package, the app will not be restored after ' 'tests are finished.') - + parser.add_argument( + '--use-voice-interaction-service', + help='This can be used to update the voice interaction service to be a ' + 'custom one. This is useful for mocking assistants, e.g.: ' + 'android.assist.service/.MainInteractionService') parser.add_argument( '--use-webview-provider', type=_RealPath, default=None, help='Override the WebView provider for the duration of the tests. ' 'The original provider will be restored if possible, ' "on Nougat the provider can't be determined and so " 'the system will choose the default provider.') + parser.add_argument( + '--run-setup-command', + default=[], + action='append', + dest='run_setup_commands', + help='This can be used to run a custom shell command on the device as a ' + 'setup step.') + parser.add_argument( + '--run-teardown-command', + default=[], + action='append', + dest='run_teardown_commands', + help='This can be used to run a custom shell command on the device as a ' + 'teardown step.') parser.add_argument( '--runtime-deps-path', dest='runtime_deps_path', type=os.path.realpath, @@ -554,12 +615,26 @@ def package_replacement(arg): required=True, help='Path or name of the apk containing the tests.') parser.add_argument( - '--test-jar', - help='Path of jar containing test java files.') + '--test-apk-as-instant', + action='store_true', + help='Install the test apk as an instant app. ' + 'Instant apps run in a more restrictive execution environment.') + parser.add_argument( + '--test-launcher-batch-limit', + dest='test_launcher_batch_limit', + type=int, + help=('Not actually used for instrumentation tests, but can be used as ' + 'a proxy for determining if the current run is a retry without ' + 'patch.')) parser.add_argument( '--timeout-scale', type=float, help='Factor by which timeouts should be scaled.') + parser.add_argument( + '--is-unit-test', + action='store_true', + help=('Specify the test suite as composed of unit tests, blocking ' + 'certain operations.')) parser.add_argument( '-w', '--wait-for-java-debugger', action='store_true', help='Wait for java debugger to attach before running any application ' @@ -572,6 +647,12 @@ def package_replacement(arg): help='If true, WPR server runs in record mode. ' 'Otherwise, runs in replay mode.') + parser.add_argument( + '--approve-app-links', + help='Force enables Digital Asset Link verification for the provided ' + 'package and domain, example usage: --approve-app-links ' + 'com.android.package:www.example.com') + # These arguments are suppressed from the help text because they should # only ever be specified by an intermediate script. 
parser.add_argument( @@ -686,6 +767,8 @@ def AddJUnitTestOptions(parser): parser.add_argument( '--robolectric-runtime-deps-dir', help='Path to runtime deps for Robolectric.') + parser.add_argument('--native-libs-dir', + help='Path to search for native libraries.') parser.add_argument( '--resource-apk', required=True, @@ -814,8 +897,44 @@ def RunTestsCommand(args, result_sink_client=None): if command == 'python': return _RunPythonTests(args) - else: - raise Exception('Unknown test type.') + raise Exception('Unknown test type.') + + +def _SinkTestResult(test_result, test_file_name, result_sink_client): + """Upload test result to result_sink. + + Args: + test_result: A BaseTestResult object + test_file_name: A string representing the file location of the test + result_sink_client: A ResultSinkClient object + + Returns: + N/A + """ + # Some tests put in non utf-8 char as part of the test + # which breaks uploads, so need to decode and re-encode. + log_decoded = test_result.GetLog() + if isinstance(log_decoded, bytes): + log_decoded = log_decoded.decode('utf-8', 'replace') + html_artifact = '' + https_artifacts = [] + for link_name, link_url in sorted(test_result.GetLinks().items()): + if link_url.startswith('https:'): + https_artifacts.append('<li><a target="_blank" href=%s>%s</a></li>' % + (link_url, link_name)) + else: + logging.info('Skipping non-https link %r (%s) for test %s.', link_name, + link_url, test_result.GetName()) + if https_artifacts: + html_artifact += '<ul>%s</ul>
    ' % '\n'.join(https_artifacts) + result_sink_client.Post(test_result.GetNameForResultSink(), + test_result.GetType(), + test_result.GetDuration(), + log_decoded.encode('utf-8'), + test_file_name, + variant=test_result.GetVariantForResultSink(), + failure_reason=test_result.GetFailureReason(), + html_artifact=html_artifact) _SUPPORTED_IN_PLATFORM_MODE = [ @@ -927,12 +1046,7 @@ def json_writer(): match = re.search(r'^(.+\..+)#', r.GetName()) test_file_name = test_class_to_file_name_dict.get( match.group(1)) if match else None - # Some tests put in non utf-8 char as part of the test - # which breaks uploads, so need to decode and re-encode. - result_sink_client.Post( - r.GetName(), r.GetType(), r.GetDuration(), - r.GetLog().decode('utf-8', 'replace').encode('utf-8'), - test_file_name) + _SinkTestResult(r, test_file_name, result_sink_client) @contextlib.contextmanager def upload_logcats_file(): @@ -957,6 +1071,9 @@ def upload_logcats_file(): upload_logcats_file(), 'upload_logcats_file' in args and args.upload_logcats_file) + save_detailed_results = (args.local_output or not local_utils.IsOnSwarming() + ) and not args.isolated_script_test_output + ### Set up test objects. out_manager = output_manager_factory.CreateOutputManager(args) @@ -968,8 +1085,24 @@ def upload_logcats_file(): contexts_to_notify_on_sigterm.append(env) contexts_to_notify_on_sigterm.append(test_run) + if args.list_tests: + try: + with out_manager, env, test_instance, test_run: + test_names = test_run.GetTestsForListing() + print('There are {} tests:'.format(len(test_names))) + for n in test_names: + print(n) + return 0 + except NotImplementedError: + sys.stderr.write('Test does not support --list-tests (type={}).\n'.format( + args.command)) + return 1 + ### Run. with out_manager, json_finalizer(): + # |raw_logs_fh| is only used by Robolectric tests. 
+ raw_logs_fh = io.StringIO() if save_detailed_results else None + with json_writer(), logcats_uploader, env, test_instance, test_run: repetitions = (range(args.repeat + @@ -985,7 +1118,7 @@ def upload_logcats_file(): raw_results = [] all_raw_results.append(raw_results) - test_run.RunTests(raw_results) + test_run.RunTests(raw_results, raw_logs_fh=raw_logs_fh) if not raw_results: all_raw_results.pop() continue @@ -1006,6 +1139,12 @@ def upload_logcats_file(): annotation=getattr(args, 'annotations', None), flakiness_server=getattr(args, 'flakiness_dashboard_server', None)) + + failed_tests = (iteration_results.GetNotPass() - + iteration_results.GetSkip()) + if failed_tests: + _LogRerunStatement(failed_tests, args.wrapper_script_args) + if args.break_on_failure and not iteration_results.DidRunPass(): break @@ -1034,8 +1173,17 @@ def upload_logcats_file(): str(tot_tests), str(iteration_count)) - if (args.local_output or not local_utils.IsOnSwarming() - ) and not args.isolated_script_test_output: + if save_detailed_results: + assert raw_logs_fh + raw_logs_fh.seek(0) + raw_logs = raw_logs_fh.read() + if raw_logs: + with out_manager.ArchivedTempfile( + 'raw_logs.txt', 'raw_logs', + output_manager.Datatype.TEXT) as raw_logs_file: + raw_logs_file.write(raw_logs) + logging.critical('RAW LOGS: %s', raw_logs_file.Link()) + with out_manager.ArchivedTempfile( 'test_results_presentation.html', 'test_results_presentation', @@ -1045,7 +1193,7 @@ def upload_logcats_file(): test_name=args.command, cs_base_url='http://cs.chromium.org', local_output=True) - results_detail_file.write(result_html_string.encode('utf-8')) + results_detail_file.write(result_html_string) results_detail_file.flush() logging.critical('TEST RESULTS: %s', results_detail_file.Link()) @@ -1063,6 +1211,66 @@ def upload_logcats_file(): else constants.ERROR_EXIT_CODE) +def _LogRerunStatement(failed_tests, wrapper_arg_str): + """Logs a message that can rerun the failed tests. + + Logs a copy/pasteable message that filters tests so just the failing tests + are run. + + Args: + failed_tests: A set of test results that did not pass. + wrapper_arg_str: A string of args that were passed to the called wrapper + script. + """ + rerun_arg_list = [] + try: + constants.CheckOutputDirectory() + # constants.CheckOutputDirectory throws bare exceptions. + except: # pylint: disable=bare-except + logging.exception('Output directory not found. Unable to generate failing ' + 'test filter file.') + return + + output_directory = constants.GetOutDirectory() + if not os.path.exists(output_directory): + logging.error('Output directory not found. Unable to generate failing ' + 'test filter file.') + return + + test_filter_file = os.path.join(os.path.relpath(output_directory), + _RERUN_FAILED_TESTS_FILE) + arg_list = shlex.split(wrapper_arg_str) if wrapper_arg_str else sys.argv + index = 0 + while index < len(arg_list): + arg = arg_list[index] + # Skip adding the filter= and/or the filter arg as we're replacing + # it with the new filter arg. + # This covers --test-filter=, --test-launcher-filter-file=, --gtest-filter=, + # --test-filter *Foobar.baz, -f *foobar, --package-filter , + # --runner-filter . + if 'filter' in arg or arg == '-f': + index += 1 if '=' in arg else 2 + continue + + rerun_arg_list.append(arg) + index += 1 + + failed_test_list = [str(t) for t in failed_tests] + with open(test_filter_file, 'w') as fp: + for t in failed_test_list: + # Test result names can have # in them that don't match when applied as + # a test name filter. 
+ fp.write('%s\n' % t.replace('#', '.')) + + rerun_arg_list.append('--test-launcher-filter-file=%s' % test_filter_file) + msg = """ + %d Test(s) failed. + Rerun failed tests with copy and pastable command: + %s + """ + logging.critical(msg, len(failed_tests), shlex.join(rerun_arg_list)) + + def DumpThreadStacks(_signal, _frame): for thread in threading.enumerate(): reraiser_thread.LogThreadStack(thread) diff --git a/build/android/test_runner.pydeps b/build/android/test_runner.pydeps index 660f8f83794d..5c1cd13440c9 100644 --- a/build/android/test_runner.pydeps +++ b/build/android/test_runner.pydeps @@ -67,6 +67,7 @@ ../../third_party/catapult/devil/devil/android/sdk/shared_prefs.py ../../third_party/catapult/devil/devil/android/sdk/split_select.py ../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../third_party/catapult/devil/devil/android/settings.py ../../third_party/catapult/devil/devil/android/tools/__init__.py ../../third_party/catapult/devil/devil/android/tools/device_recovery.py ../../third_party/catapult/devil/devil/android/tools/device_status.py @@ -95,6 +96,7 @@ ../../third_party/catapult/devil/devil/utils/timeout_retry.py ../../third_party/catapult/devil/devil/utils/watchdog_timer.py ../../third_party/catapult/devil/devil/utils/zip_utils.py +../../third_party/catapult/third_party/six/six.py ../../third_party/colorama/src/colorama/__init__.py ../../third_party/colorama/src/colorama/ansi.py ../../third_party/colorama/src/colorama/ansitowin32.py @@ -102,7 +104,8 @@ ../../third_party/colorama/src/colorama/win32.py ../../third_party/colorama/src/colorama/winterm.py ../../third_party/jinja2/__init__.py -../../third_party/jinja2/_compat.py +../../third_party/jinja2/_identifier.py +../../third_party/jinja2/async_utils.py ../../third_party/jinja2/bccache.py ../../third_party/jinja2/compiler.py ../../third_party/jinja2/defaults.py @@ -119,30 +122,33 @@ ../../third_party/jinja2/tests.py ../../third_party/jinja2/utils.py ../../third_party/jinja2/visitor.py +../../third_party/logdog/logdog/__init__.py +../../third_party/logdog/logdog/bootstrap.py +../../third_party/logdog/logdog/stream.py +../../third_party/logdog/logdog/streamname.py +../../third_party/logdog/logdog/varint.py ../../third_party/markupsafe/__init__.py ../../third_party/markupsafe/_compat.py ../../third_party/markupsafe/_native.py -../../tools/swarming_client/libs/__init__.py -../../tools/swarming_client/libs/logdog/__init__.py -../../tools/swarming_client/libs/logdog/bootstrap.py -../../tools/swarming_client/libs/logdog/stream.py -../../tools/swarming_client/libs/logdog/streamname.py -../../tools/swarming_client/libs/logdog/varint.py +../action_helpers.py ../gn_helpers.py ../print_python_deps.py ../skia_gold_common/__init__.py ../skia_gold_common/skia_gold_properties.py ../skia_gold_common/skia_gold_session.py ../skia_gold_common/skia_gold_session_manager.py +../util/lib/__init__.py ../util/lib/common/chrome_test_server_spawner.py ../util/lib/common/unittest_util.py -convert_dex_profile.py +../util/lib/results/__init__.py +../util/lib/results/result_sink.py +../util/lib/results/result_types.py +../zip_helpers.py devil_chromium.py gyp/dex.py gyp/util/__init__.py gyp/util/build_utils.py gyp/util/md5_check.py -gyp/util/zipalign.py incremental_install/__init__.py incremental_install/installer.py pylib/__init__.py @@ -152,7 +158,6 @@ pylib/base/environment.py pylib/base/environment_factory.py pylib/base/output_manager.py pylib/base/output_manager_factory.py -pylib/base/result_sink.py 
pylib/base/test_collection.py pylib/base/test_exception.py pylib/base/test_instance.py @@ -204,6 +209,7 @@ pylib/results/presentation/test_results_presentation.py pylib/results/report_results.py pylib/symbols/__init__.py pylib/symbols/deobfuscator.py +pylib/symbols/expensive_line_transformer.py pylib/symbols/stack_symbolizer.py pylib/utils/__init__.py pylib/utils/chrome_proxy_utils.py @@ -216,7 +222,6 @@ pylib/utils/instrumentation_tracing.py pylib/utils/local_utils.py pylib/utils/logdog_helper.py pylib/utils/logging_utils.py -pylib/utils/proguard.py pylib/utils/repo_utils.py pylib/utils/shared_preference_utils.py pylib/utils/test_filter.py diff --git a/build/android/test_wrapper/logdog_wrapper.py b/build/android/test_wrapper/logdog_wrapper.py index 782d5d87abff..56206572e146 100755 --- a/build/android/test_wrapper/logdog_wrapper.py +++ b/build/android/test_wrapper/logdog_wrapper.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2016 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,12 +7,15 @@ import argparse import contextlib +import json import logging import os import signal import subprocess import sys +import six + _SRC_PATH = os.path.abspath(os.path.join( os.path.dirname(__file__), '..', '..', '..')) sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'devil')) @@ -23,17 +26,17 @@ from devil.utils import timeout_retry from py_utils import tempfile_ext -PROJECT = 'chromium' OUTPUT = 'logdog' COORDINATOR_HOST = 'luci-logdog.appspot.com' -SERVICE_ACCOUNT_JSON = ('/creds/service_accounts' '/service-account-luci-logdog-publisher.json') LOGDOG_TERMINATION_TIMEOUT = 30 def CommandParser(): # Parses the command line arguments being passed in - parser = argparse.ArgumentParser() + if six.PY3: + parser = argparse.ArgumentParser(allow_abbrev=False) + else: + parser = argparse.ArgumentParser() wrapped = parser.add_mutually_exclusive_group() wrapped.add_argument( '--target', @@ -71,6 +74,28 @@ def NoLeakingProcesses(popen): str(popen.pid)) + +def GetProjectFromLuciContext(): + """Return the "project" from LUCI_CONTEXT. + + LUCI_CONTEXT contains a section "realm.name" whose value follows the format + "<project>:<realm>". This method parses and returns the "project" part. + + Fall back to "chromium" if the realm name is None. + """ + project = 'chromium' + ctx_path = os.environ.get('LUCI_CONTEXT') + if ctx_path: + try: + with open(ctx_path) as f: + luci_ctx = json.load(f) + realm_name = luci_ctx.get('realm', {}).get('name') + if realm_name: + project = realm_name.split(':')[0] + except (OSError, IOError, ValueError): + pass + return project + + 
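As a quick illustration of the parsing this function performs (the realm value below is hypothetical, not taken from the patch):

    # LUCI_CONTEXT carries {"realm": {"name": "<project>:<realm>"}}; the
    # project is everything before the first colon.
    luci_ctx = {'realm': {'name': 'chrome:try'}}  # hypothetical context
    realm_name = luci_ctx.get('realm', {}).get('name')
    project = realm_name.split(':')[0] if realm_name else 'chromium'
    assert project == 'chrome'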
def main(): parser = CommandParser() args, extra_cmd_args = parser.parse_known_args(sys.argv[1:]) @@ -99,18 +124,18 @@ def main(): 'butler.sock') prefix = os.path.join('android', 'swarming', 'logcats', os.environ.get('SWARMING_TASK_ID')) + project = GetProjectFromLuciContext() logdog_cmd = [ args.logdog_bin_cmd, - '-project', PROJECT, + '-project', project, '-output', OUTPUT, '-prefix', prefix, - '--service-account-json', SERVICE_ACCOUNT_JSON, '-coordinator-host', COORDINATOR_HOST, 'serve', '-streamserver-uri', streamserver_uri] test_env.update({ - 'LOGDOG_STREAM_PROJECT': PROJECT, + 'LOGDOG_STREAM_PROJECT': project, 'LOGDOG_STREAM_PREFIX': prefix, 'LOGDOG_STREAM_SERVER_PATH': streamserver_uri, 'LOGDOG_COORDINATOR_HOST': COORDINATOR_HOST, diff --git a/build/android/tests/symbolize/Makefile b/build/android/tests/symbolize/Makefile index 4fc53dad56ed..82c9ea53fa9f 100644 --- a/build/android/tests/symbolize/Makefile +++ b/build/android/tests/symbolize/Makefile @@ -1,4 +1,4 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/tests/symbolize/a.cc b/build/android/tests/symbolize/a.cc index f0c7ca4c67f4..67441185ab38 100644 --- a/build/android/tests/symbolize/a.cc +++ b/build/android/tests/symbolize/a.cc @@ -1,4 +1,4 @@ -// Copyright 2013 The Chromium Authors. All rights reserved. +// Copyright 2013 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/tests/symbolize/b.cc b/build/android/tests/symbolize/b.cc index db8752099aa7..9279977aa5b0 100644 --- a/build/android/tests/symbolize/b.cc +++ b/build/android/tests/symbolize/b.cc @@ -1,4 +1,4 @@ -// Copyright 2013 The Chromium Authors. All rights reserved. +// Copyright 2013 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/android/tombstones.py b/build/android/tombstones.py index 082e7c1c783d..430b284d1712 100755 --- a/build/android/tombstones.py +++ b/build/android/tombstones.py @@ -1,6 +1,6 @@ -#!/usr/bin/env vpython +#!/usr/bin/env vpython3 # -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # @@ -156,7 +156,7 @@ def _GetTombstonesForDevice(device, resolve_all_tombstones, return ret # Sort the tombstones in date order, descending - all_tombstones.sort(cmp=lambda a, b: cmp(b[1], a[1])) + all_tombstones.sort(key=lambda a: a[1], reverse=True) # Only resolve the most recent unless --all-tombstones given. 
tombstones = all_tombstones if resolve_all_tombstones else [all_tombstones[0]] diff --git a/build/android/unused_resources/BUILD.gn b/build/android/unused_resources/BUILD.gn new file mode 100644 index 000000000000..8eb56237bb08 --- /dev/null +++ b/build/android/unused_resources/BUILD.gn @@ -0,0 +1,19 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +java_binary("unused_resources") { + sources = [ "//build/android/unused_resources/UnusedResources.java" ] + main_class = "build.android.unused_resources.UnusedResources" + deps = [ + "//third_party/android_deps:com_android_tools_common_java", + "//third_party/android_deps:com_android_tools_layoutlib_layoutlib_api_java", + "//third_party/android_deps:com_android_tools_sdk_common_java", + "//third_party/android_deps:com_google_guava_guava_java", + "//third_party/kotlin_stdlib:kotlin_stdlib_java", + "//third_party/r8:r8_java", + ] + wrapper_script_name = "helper/unused_resources" +} diff --git a/build/android/gyp/resources_shrinker/Shrinker.java b/build/android/unused_resources/UnusedResources.java similarity index 88% rename from build/android/gyp/resources_shrinker/Shrinker.java rename to build/android/unused_resources/UnusedResources.java index 50e2f93e9bc2..079fa9602342 100644 --- a/build/android/gyp/resources_shrinker/Shrinker.java +++ b/build/android/unused_resources/UnusedResources.java @@ -15,11 +15,11 @@ */ // Modifications are owned by the Chromium Authors. -// Copyright 2021 The Chromium Authors. All rights reserved. +// Copyright 2021 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -package build.android.gyp.resources_shrinker; +package build.android.unused_resources; import static com.android.ide.common.symbols.SymbolIo.readFromAapt; import static com.android.utils.SdkUtils.endsWithIgnoreCase; @@ -40,10 +40,10 @@ import com.android.tools.r8.origin.PathOrigin; import com.android.utils.XmlUtils; import com.google.common.base.Charsets; -import com.google.common.base.Joiner; import com.google.common.collect.Maps; import com.google.common.io.ByteStreams; import com.google.common.io.Closeables; +import com.google.common.io.Files; import org.w3c.dom.Document; import org.w3c.dom.Node; @@ -54,7 +54,6 @@ import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; -import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; @@ -70,14 +69,14 @@ /** Copied with modifications from gradle core source - https://android.googlesource.com/platform/tools/base/+/master/build-system/gradle-core/src/main/groovy/com/android/build/gradle/tasks/ResourceUsageAnalyzer.java + https://cs.android.com/search?q=f:build-system.*ResourceUsageAnalyzer.java Modifications are mostly to: - Remove unused code paths to reduce complexity. - Reduce dependencies unless absolutely required. 
*/ -public class Shrinker { +public class UnusedResources { private static final String ANDROID_RES = "android_res/"; private static final String DOT_DEX = ".dex"; private static final String DOT_CLASS = ".class"; @@ -97,9 +96,6 @@ public class Shrinker { private final StringWriter mDebugOutput; private final PrintWriter mDebugPrinter; - /** Easy way to invoke more verbose output for debugging */ - private boolean mDebug = false; - /** The computed set of unused resources */ private List mUnused; @@ -136,8 +132,8 @@ public V getSecond() { } } - public Shrinker(Iterable rTxtFiles, Iterable classes, Iterable manifests, - File mapping, Iterable resources, File reportFile) { + public UnusedResources(Iterable rTxtFiles, Iterable classes, + Iterable manifests, File mapping, Iterable resources, File reportFile) { mRTxtFiles = rTxtFiles; mProguardMapping = mapping; mClasses = classes; @@ -213,13 +209,11 @@ private void recordResources(Iterable resources) throws IOException, SAXException, ParserConfigurationException { for (File resDir : resources) { File[] resourceFolders = resDir.listFiles(); - if (resourceFolders != null) { - for (File folder : resourceFolders) { - ResourceFolderType folderType = - ResourceFolderType.getFolderType(folder.getName()); - if (folderType != null) { - recordResources(folderType, folder); - } + assert resourceFolders != null : "Invalid resource directory " + resDir; + for (File folder : resourceFolders) { + ResourceFolderType folderType = ResourceFolderType.getFolderType(folder.getName()); + if (folderType != null) { + recordResources(folderType, folder); } } } @@ -256,6 +250,10 @@ void recordMapping(File mapping) throws IOException { final String resourceString = ".R$"; Map nameMap = null; for (String line : Files.readLines(mapping, UTF_8)) { + // Ignore R8's mapping comments. + if (line.startsWith("#")) { + continue; + } if (line.startsWith(" ") || line.startsWith("\t")) { if (nameMap != null) { // We're processing the members of a resource class: record names into the map @@ -382,17 +380,24 @@ private void recordClassUsages(File file) throws IOException { } } + private String stringifyResource(Resource resource) { + return String.format("%s:%s:0x%08x", resource.type, resource.name, resource.value); + } + private void recordClassUsages(File file, String name, byte[] bytes) { assert name.endsWith(DOT_DEX); ReferenceChecker callback = new ReferenceChecker() { @Override public boolean shouldProcess(String internalName) { - return !isResourceClass(internalName + DOT_CLASS); + // We do not need to ignore R subclasses since R8 now removes + // unused resource id fields in R subclasses thus their + // remaining presence means real usage. + return true; } @Override public void referencedInt(int value) { - Shrinker.this.referencedInt("dex", value, file, name); + UnusedResources.this.referencedInt("dex", value, file, name); } @Override @@ -405,6 +410,12 @@ public void referencedStaticField(String internalName, String fieldName) { Resource resource = getResourceFromCode(internalName, fieldName); if (resource != null) { ResourceUsageModel.markReachable(resource); + if (mDebugPrinter != null) { + mDebugPrinter.println("Marking " + stringifyResource(resource) + + " reachable: referenced from dex" + + " in " + file + ":" + name + " (static field access " + + internalName + "." 
+ fieldName + ")"); + } } } @@ -488,6 +499,12 @@ private void addResourcesFromRTxtFile(File file) { mModel.addResource(symbol.getResourceType(), symbol.getName(), null); } } else { + if (mDebugPrinter != null) { + mDebugPrinter.println("Extracted R.txt resource: " + + symbol.getResourceType() + ":" + symbol.getName() + ":" + + String.format( + "0x%08x", Integer.parseInt(symbolValue.substring(2), 16))); + } mModel.addResource(symbol.getResourceType(), symbol.getName(), symbolValue); } } @@ -502,10 +519,10 @@ ResourceUsageModel getModel() { private void referencedInt(String context, int value, File file, String currentClass) { Resource resource = mModel.getResource(value); - if (ResourceUsageModel.markReachable(resource) && mDebug) { - assert mDebugPrinter != null : "mDebug is true, but mDebugPrinter is null."; - mDebugPrinter.println("Marking " + resource + " reachable: referenced from " + context - + " in " + file + ":" + currentClass); + if (ResourceUsageModel.markReachable(resource) && mDebugPrinter != null) { + mDebugPrinter.println("Marking " + stringifyResource(resource) + + " reachable: referenced from " + context + " in " + file + ":" + + currentClass); } } @@ -530,8 +547,10 @@ protected boolean ignoreToolsAttributes() { @Override protected void onRootResourcesFound(List roots) { if (mDebugPrinter != null) { - mDebugPrinter.println( - "\nThe root reachable resources are:\n" + Joiner.on(",\n ").join(roots)); + mDebugPrinter.println("\nThe root reachable resources are:"); + for (Resource root : roots) { + mDebugPrinter.println(" " + stringifyResource(root) + ","); + } } } @@ -563,7 +582,7 @@ public static void main(String[] args) throws Exception { .map(s -> new File(s)) .collect(Collectors.toList()); break; - case "--dex": + case "--dexes": classes = Arrays.stream(args[i + 1].split(":")) .map(s -> new File(s)) .collect(Collectors.toList()); @@ -591,9 +610,10 @@ public static void main(String[] args) throws Exception { throw new IllegalArgumentException(args[i] + " is not a valid arg."); } } - Shrinker shrinker = new Shrinker(rTxtFiles, classes, manifests, mapping, resources, log); - shrinker.analyze(); - shrinker.close(); - shrinker.emitConfig(configPath); + UnusedResources unusedResources = + new UnusedResources(rTxtFiles, classes, manifests, mapping, resources, log); + unusedResources.analyze(); + unusedResources.close(); + unusedResources.emitConfig(configPath); } } diff --git a/build/android/update_deps/update_third_party_deps.py b/build/android/update_deps/update_third_party_deps.py index 3a869c43ec47..50c0e225f095 100755 --- a/build/android/update_deps/update_third_party_deps.py +++ b/build/android/update_deps/update_third_party_deps.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2016 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/android/update_verification.py b/build/android/update_verification.py index 3d478f4cf005..55a403e855db 100755 --- a/build/android/update_verification.py +++ b/build/android/update_verification.py @@ -1,6 +1,6 @@ -#!/usr/bin/env vpython +#!/usr/bin/env vpython3 # -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -28,6 +28,8 @@ import logging import sys +# import raw_input when converted to python3 +from six.moves import input # pylint: disable=redefined-builtin import devil_chromium from devil.android import apk_helper @@ -36,10 +38,11 @@ from devil.android import device_utils from devil.utils import run_tests_helper + def CreateAppData(device, old_apk, app_data, package_name): device.Install(old_apk) - raw_input('Set the application state. Once ready, press enter and ' - 'select "Backup my data" on the device.') + input('Set the application state. Once ready, press enter and ' + 'select "Backup my data" on the device.') device.adb.Backup(app_data, packages=[package_name]) logging.critical('Application data saved to %s', app_data) @@ -47,8 +50,8 @@ def TestUpdate(device, old_apk, new_apk, app_data, package_name): device.Install(old_apk) device.adb.Restore(app_data) # Restore command is not synchronous - raw_input('Select "Restore my data" on the device. Then press enter to ' - 'continue.') + input('Select "Restore my data" on the device. Then press enter to ' + 'continue.') if not device.IsApplicationInstalled(package_name): raise Exception('Expected package %s to already be installed. ' 'Package name might have changed!' % package_name) diff --git a/build/android/video_recorder.py b/build/android/video_recorder.py index 6c54e7a55f7f..39387797536e 100755 --- a/build/android/video_recorder.py +++ b/build/android/video_recorder.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2015 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/apple/apple_info_plist.gni b/build/apple/apple_info_plist.gni index fe51773dd001..bf66dbd6b78c 100644 --- a/build/apple/apple_info_plist.gni +++ b/build/apple/apple_info_plist.gni @@ -1,4 +1,4 @@ -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/apple/compile_entitlements.gni b/build/apple/compile_entitlements.gni index 006d5ac2d550..1f84a115d0b1 100644 --- a/build/apple/compile_entitlements.gni +++ b/build/apple/compile_entitlements.gni @@ -1,4 +1,4 @@ -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/apple/compile_plist.gni b/build/apple/compile_plist.gni index 90485b6a2a7f..df8de0c21c1b 100644 --- a/build/apple/compile_plist.gni +++ b/build/apple/compile_plist.gni @@ -1,4 +1,4 @@ -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/apple/convert_plist.gni b/build/apple/convert_plist.gni index a1134d9b3110..740bfc77b508 100644 --- a/build/apple/convert_plist.gni +++ b/build/apple/convert_plist.gni @@ -1,4 +1,4 @@ -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
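The plist_util.py diff that follows deletes the Python 2 branches that shelled out to plutil, leaving plistlib to do both the reading and the writing. A minimal standalone sketch of the resulting pattern (the helper names here are illustrative, not the module's API):

    import os
    import plistlib

    def load_plist(path):
        # On Python 3, plistlib.load() auto-detects XML and binary plists.
        with open(path, 'rb') as f:
            return plistlib.load(f)

    def save_plist(path, fmt, data):
        # Unlink first: writing through open(path, 'wb') updates the file in
        # place, which would also modify any other hardlinks pointing at it.
        try:
            os.unlink(path)
        except FileNotFoundError:
            pass
        formats = {'binary1': plistlib.FMT_BINARY, 'xml1': plistlib.FMT_XML}
        with open(path, 'wb') as f:
            plistlib.dump(data, f, fmt=formats[fmt])

The 'binary1'/'xml1' keys mirror the format names plutil accepts, which is why the diff keeps them as the external interface.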
diff --git a/build/apple/plist_util.py b/build/apple/plist_util.py index 54cf46176bc2..016a06ac4a5e 100644 --- a/build/apple/plist_util.py +++ b/build/apple/plist_util.py @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -12,11 +12,6 @@ import tempfile import shlex -if sys.version_info.major < 3: - basestring_compat = basestring -else: - basestring_compat = str - # Xcode substitutes variables like ${PRODUCT_NAME} or $(PRODUCT_NAME) when # compiling Info.plist. It also supports modifiers like :identifier # or :rfc1034identifier. SUBSTITUTION_REGEXP_LIST is a list of regular @@ -89,46 +84,30 @@ def Interpolate(value, substitutions): return {k: Interpolate(v, substitutions) for k, v in value.items()} if isinstance(value, list): return [Interpolate(v, substitutions) for v in value] - if isinstance(value, basestring_compat): + if isinstance(value, str): return InterpolateString(value, substitutions) return value def LoadPList(path): """Loads Plist at |path| and returns it as a dictionary.""" - if sys.version_info.major == 2: - fd, name = tempfile.mkstemp() - try: - subprocess.check_call(['plutil', '-convert', 'xml1', '-o', name, path]) - with os.fdopen(fd, 'rb') as f: - return plistlib.readPlist(f) - finally: - os.unlink(name) - else: - with open(path, 'rb') as f: - return plistlib.load(f) + with open(path, 'rb') as f: + return plistlib.load(f) def SavePList(path, format, data): """Saves |data| as a Plist to |path| in the specified |format|.""" - # The below does not replace the destination file but update it in place, - # so if more than one hardlink points to destination all of them will be - # modified. This is not what is expected, so delete destination file if - # it does exist. - if os.path.exists(path): + # The open() call does not replace the destination file but updates it + # in place, so if more than one hardlink points to destination all of them + # will be modified. This is not what is expected, so delete destination file + # if it does exist. + try: os.unlink(path) - if sys.version_info.major == 2: - fd, name = tempfile.mkstemp() - try: - with os.fdopen(fd, 'wb') as f: - plistlib.writePlist(data, f) - subprocess.check_call(['plutil', '-convert', format, '-o', path, name]) - finally: - os.unlink(name) - else: - with open(path, 'wb') as f: - plist_format = {'binary1': plistlib.FMT_BINARY, 'xml1': plistlib.FMT_XML} - plistlib.dump(data, f, fmt=plist_format[format]) + except FileNotFoundError: + pass + with open(path, 'wb') as f: + plist_format = {'binary1': plistlib.FMT_BINARY, 'xml1': plistlib.FMT_XML} + plistlib.dump(data, f, fmt=plist_format[format]) def MergePList(plist1, plist2): @@ -243,10 +222,6 @@ def _Execute(args): def Main(): - # Cache this codec so that plistlib can find it. See - # https://crbug.com/1005190#c2 for more details. - codecs.lookup('utf-8') - parser = argparse.ArgumentParser(description='manipulate plist files') subparsers = parser.add_subparsers() @@ -258,8 +233,4 @@ def Main(): if __name__ == '__main__': - # TODO(https://crbug.com/941669): Temporary workaround until all scripts use - # python3 by default.
- if sys.version_info[0] < 3: - os.execvp('python3', ['python3'] + sys.argv) sys.exit(Main()) diff --git a/build/apple/tweak_info_plist.gni b/build/apple/tweak_info_plist.gni index 33f22ca2d087..347c5d58d840 100644 --- a/build/apple/tweak_info_plist.gni +++ b/build/apple/tweak_info_plist.gni @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/apple/tweak_info_plist.py b/build/apple/tweak_info_plist.py index 76f64dc37cc0..8aa28b002108 100755 --- a/build/apple/tweak_info_plist.py +++ b/build/apple/tweak_info_plist.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -20,7 +20,6 @@ # by the time the app target is done, the info.plist is correct. # -from __future__ import print_function import optparse import os @@ -229,6 +228,16 @@ def _RemoveGTMKeys(plist): _RemoveKeys(plist, 'GTMUserAgentID', 'GTMUserAgentVersion') +def _AddPrivilegedHelperId(plist, privileged_helper_id): + plist['SMPrivilegedExecutables'] = { + privileged_helper_id: 'identifier ' + privileged_helper_id + } + + +def _RemovePrivilegedHelperId(plist): + _RemoveKeys(plist, 'SMPrivilegedExecutables') + + def Main(argv): parser = optparse.OptionParser('%prog [options]') parser.add_option('--plist', @@ -289,9 +298,6 @@ def Main(argv): type='int', default=False, help='Add GTM metadata [1 or 0]') - # TODO(crbug.com/1140474): Remove once iOS 14.2 reaches mass adoption. - parser.add_option('--lock-to-version', - help='Set CFBundleVersion to given value + @MAJOR@@PATH@') parser.add_option( '--version-overrides', action='append', @@ -309,6 +315,12 @@ def Main(argv): type='string', default=None, help='The version string [major.minor.build.patch]') + parser.add_option('--privileged_helper_id', + dest='privileged_helper_id', + action='store', + type='string', + default=None, + help='The id of the privileged helper executable.') (options, args) = parser.parse_args(argv) if len(args) > 0: @@ -359,25 +371,10 @@ def Main(argv): 'CFBundleVersion': '@BUILD@.@PATCH@', } else: - # TODO(crbug.com/1140474): Remove once iOS 14.2 reaches mass adoption. - if options.lock_to_version: - # Pull in the PATCH number and format it to 3 digits. - VERSION_TOOL = os.path.join(TOP, 'build/util/version.py') - VERSION_FILE = os.path.join(TOP, 'chrome/VERSION') - (stdout, - retval) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t', '@PATCH@']) - if retval != 0: - return 2 - patch = '{:03d}'.format(int(stdout)) - version_format_for_key = { - 'CFBundleShortVersionString': '@MAJOR@.@BUILD@.@PATCH@', - 'CFBundleVersion': options.lock_to_version + '.@MAJOR@' + patch - } - else: - version_format_for_key = { - 'CFBundleShortVersionString': '@MAJOR@.@BUILD@.@PATCH@', - 'CFBundleVersion': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@' - } + version_format_for_key = { + 'CFBundleShortVersionString': '@MAJOR@.@BUILD@.@PATCH@', + 'CFBundleVersion': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@' + } if options.use_breakpad: version_format_for_key['BreakpadVersion'] = \ @@ -423,6 +420,12 @@ def Main(argv): else: _RemoveGTMKeys(plist) + # Add SMPrivilegedExecutables keys. 
+ if options.privileged_helper_id: + _AddPrivilegedHelperId(plist, options.privileged_helper_id) + else: + _RemovePrivilegedHelperId(plist) + output_path = options.plist_path if options.plist_output is not None: output_path = options.plist_output diff --git a/build/apple/write_pkg_info.py b/build/apple/write_pkg_info.py index 8d07cdb3114e..2f59c2f732fa 100644 --- a/build/apple/write_pkg_info.py +++ b/build/apple/write_pkg_info.py @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -24,8 +24,10 @@ def Main(): args = parser.parse_args() # Remove the output if it exists already. - if os.path.exists(args.output): + try: os.unlink(args.output) + except FileNotFoundError: + pass plist = plist_util.LoadPList(args.plist) package_type = plist['CFBundlePackageType'] @@ -47,8 +49,4 @@ def Main(): if __name__ == '__main__': - # TODO(https://crbug.com/941669): Temporary workaround until all scripts use - # python3 by default. - if sys.version_info[0] < 3: - os.execvp('python3', ['python3'] + sys.argv) sys.exit(Main()) diff --git a/build/apple/xcrun.py b/build/apple/xcrun.py index 71bf50c35204..011dd477fabd 100755 --- a/build/apple/xcrun.py +++ b/build/apple/xcrun.py @@ -1,5 +1,5 @@ -#!/usr/bin/python3 -# Copyright 2020 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ diff --git a/build/args/README.txt b/build/args/README.txt index 825bf64c6935..b82fb04ab28a 100644 --- a/build/args/README.txt +++ b/build/args/README.txt @@ -1,10 +1,6 @@ This directory is here to hold .gni files that contain sets of GN build arguments for given configurations. -(Currently this directory is empty because we removed the only thing here, but -this has come up several times so I'm confident we'll need this again. If this -directory is still empty by 2017, feel free to delete it. --Brett) - Some projects or bots may have build configurations with specific combinations of flags. Rather than making a new global flag for your specific project and adding it all over the build to each arg it should affect, you can add a .gni diff --git a/build/args/chromeos/README.md b/build/args/chromeos/README.md index e02e185519c2..284225279d09 100644 --- a/build/args/chromeos/README.md +++ b/build/args/chromeos/README.md @@ -1,4 +1,6 @@ -This directory is used to store GN arg mapping for Chrome OS boards. +This directory is used to store GN arg mapping for Chrome OS boards. The values +of the args are determined by processing the [chromeos-chrome ebuild] for a +given board and a given ChromeOS version (stored in the [CHROMEOS_LKGM] file). Files in this directory are populated by running `gclient sync` with specific arguments set in the .gclient file. Specifically: @@ -50,3 +52,6 @@ goma_dir = "/path/to/goma/" TODO(bpastene): Make 'cros_boards' a first class citizen in gclient and replace it with 'target_boards' instead. 
+ +[chromeos-chrome ebuild]: https://chromium.googlesource.com/chromiumos/overlays/chromiumos-overlay/+/HEAD/chromeos-base/chromeos-chrome/chromeos-chrome-9999.ebuild +[CHROMEOS_LKGM]: https://chromium.googlesource.com/chromium/src/+/HEAD/chromeos/CHROMEOS_LKGM diff --git a/build/args/headless.gn b/build/args/headless.gn index 9b8392c1d82d..8834eb1bb4ff 100644 --- a/build/args/headless.gn +++ b/build/args/headless.gn @@ -11,12 +11,14 @@ use_ozone = true ozone_auto_platforms = false ozone_platform = "headless" ozone_platform_headless = true +angle_enable_vulkan = true +angle_enable_swiftshader = true # Embed resource.pak into binary to simplify deployment. headless_use_embedded_resources = true -# Expose headless bindings for freetype library bundled with Chromium. -headless_fontconfig_utils = true +# Disable headless commands support. +headless_enable_commands = false # Don't use Prefs component, disabling access to Local State prefs. headless_use_prefs = false @@ -39,6 +41,7 @@ enable_nacl = false enable_print_preview = false enable_remoting = false use_alsa = false +use_bluez = false use_cups = false use_dbus = false use_gio = false @@ -51,6 +54,3 @@ v8_enable_lazy_source_positions = false use_glib = false use_gtk = false use_pangocairo = false - -# TODO(1096425): Remove this once use_x11 goes away. -use_x11 = false diff --git a/build/build-ctags.sh b/build/build-ctags.sh index 61e017e32988..d7756a2ba663 100755 --- a/build/build-ctags.sh +++ b/build/build-ctags.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/build_config.h b/build/build_config.h index 0cce67c4189f..798b49467a07 100644 --- a/build/build_config.h +++ b/build/build_config.h @@ -1,34 +1,50 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. +// Copyright 2012 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -// This file adds defines about the platform we're currently building on. +// This file doesn't belong to any GN target by design for faster build and +// less developer overhead. + +// This file adds build flags about the OS we're currently building on. They are +// defined directly in this file instead of via a `buildflag_header` target in a +// GN file for faster build. They are defined using the corresponding OS defines +// (e.g. OS_WIN) which are also defined in this file (except for OS_CHROMEOS, +// which is set by the build system). These defines are deprecated and should +// NOT be used directly. 
For example: // Please Use: #if BUILDFLAG(IS_WIN) // Deprecated: #if defined(OS_WIN) // // Operating System: -// OS_AIX / OS_ANDROID / OS_ASMJS / OS_FREEBSD / OS_FUCHSIA / OS_IOS / -// OS_LINUX / OS_MAC / OS_NACL (SFI or NONSFI) / OS_NETBSD / OS_OPENBSD / -// OS_QNX / OS_SOLARIS / OS_WIN +// IS_AIX / IS_ANDROID / IS_ASMJS / IS_CHROMEOS / IS_FREEBSD / IS_FUCHSIA / +// IS_IOS / IS_IOS_MACCATALYST / IS_LINUX / IS_MAC / IS_NACL / IS_NETBSD / +// IS_OPENBSD / IS_QNX / IS_SOLARIS / IS_WIN // Operating System family: -// OS_APPLE: IOS or MAC -// OS_BSD: FREEBSD or NETBSD or OPENBSD -// OS_POSIX: AIX or ANDROID or ASMJS or CHROMEOS or FREEBSD or IOS or LINUX +// IS_APPLE: IOS or MAC or IOS_MACCATALYST +// IS_BSD: FREEBSD or NETBSD or OPENBSD +// IS_POSIX: AIX or ANDROID or ASMJS or CHROMEOS or FREEBSD or IOS or LINUX // or MAC or NACL or NETBSD or OPENBSD or QNX or SOLARIS + +// This file also adds defines specific to the platform, architecture etc. // -// /!\ Note: OS_CHROMEOS is set by the build system, not this file +// Platform: +// IS_OZONE // // Compiler: // COMPILER_MSVC / COMPILER_GCC // // Processor: -// ARCH_CPU_ARM64 / ARCH_CPU_ARMEL / ARCH_CPU_MIPS / ARCH_CPU_MIPS64 / -// ARCH_CPU_MIPS64EL / ARCH_CPU_MIPSEL / ARCH_CPU_PPC64 / ARCH_CPU_S390 / -// ARCH_CPU_S390X / ARCH_CPU_X86 / ARCH_CPU_X86_64 +// ARCH_CPU_ARM64 / ARCH_CPU_ARMEL / ARCH_CPU_LOONG32 / ARCH_CPU_LOONG64 / +// ARCH_CPU_MIPS / ARCH_CPU_MIPS64 / ARCH_CPU_MIPS64EL / ARCH_CPU_MIPSEL / +// ARCH_CPU_PPC64 / ARCH_CPU_S390 / ARCH_CPU_S390X / ARCH_CPU_X86 / +// ARCH_CPU_X86_64 / ARCH_CPU_RISCV64 // Processor family: // ARCH_CPU_ARM_FAMILY: ARMEL or ARM64 +// ARCH_CPU_LOONG_FAMILY: LOONG32 or LOONG64 // ARCH_CPU_MIPS_FAMILY: MIPS64EL or MIPSEL or MIPS64 or MIPS // ARCH_CPU_PPC64_FAMILY: PPC64 // ARCH_CPU_S390_FAMILY: S390 or S390X // ARCH_CPU_X86_FAMILY: X86 or X86_64 +// ARCH_CPU_RISCV_FAMILY: Riscv64 // Processor features: // ARCH_CPU_31_BITS / ARCH_CPU_32_BITS / ARCH_CPU_64_BITS // ARCH_CPU_BIG_ENDIAN / ARCH_CPU_LITTLE_ENDIAN @@ -36,20 +52,14 @@ #ifndef BUILD_BUILD_CONFIG_H_ #define BUILD_BUILD_CONFIG_H_ +#include "build/buildflag.h" // IWYU pragma: export + // A set of macros to use for platform detection. #if defined(STARBOARD) // noop #elif defined(__native_client__) // __native_client__ must be first, so that other OS_ defines are not set. #define OS_NACL 1 -// OS_NACL comes in two sandboxing technology flavors, SFI or Non-SFI. -// PNaCl toolchain defines __native_client_nonsfi__ macro in Non-SFI build -// mode, while it does not in SFI build mode. -#if defined(__native_client_nonsfi__) -#define OS_NACL_NONSFI -#else -#define OS_NACL_SFI -#endif #elif defined(ANDROID) #define OS_ANDROID 1 #elif defined(__APPLE__) @@ -59,6 +69,11 @@ #include <TargetConditionals.h> #if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE #define OS_IOS 1 +// Catalyst is the technology that allows running iOS apps on macOS. These +// builds are both OS_IOS and OS_IOS_MACCATALYST. +#if defined(TARGET_OS_MACCATALYST) && TARGET_OS_MACCATALYST +#define OS_IOS_MACCATALYST +#endif // defined(TARGET_OS_MACCATALYST) && TARGET_OS_MACCATALYST #else #define OS_MAC 1 #endif // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE @@ -69,7 +84,7 @@ #define OS_LINUX 1 #endif // !defined(OS_CHROMEOS) // Include a system header to pull in features.h for glibc/uclibc macros. -#include <unistd.h> +#include <assert.h> #if defined(__GLIBC__) && !defined(__UCLIBC__) // We really are using glibc, not uClibc pretending to be glibc.
#define LIBC_GLIBC 1 @@ -92,6 +107,8 @@ #define OS_AIX 1 #elif defined(__asmjs__) || defined(__wasm__) #define OS_ASMJS 1 +#elif defined(__MVS__) +#define OS_ZOS 1 #else #error Please add support for your platform in build/build_config.h #endif @@ -114,10 +131,131 @@ defined(OS_FREEBSD) || defined(OS_IOS) || defined(OS_LINUX) || \ defined(OS_CHROMEOS) || defined(OS_MAC) || defined(OS_NACL) || \ defined(OS_NETBSD) || defined(OS_OPENBSD) || defined(OS_QNX) || \ - defined(OS_SOLARIS) + defined(OS_SOLARIS) || defined(OS_ZOS) #define OS_POSIX 1 #endif +// OS build flags +#if defined(OS_AIX) +#define BUILDFLAG_INTERNAL_IS_AIX() (1) +#else +#define BUILDFLAG_INTERNAL_IS_AIX() (0) +#endif + +#if defined(OS_ANDROID) +#define BUILDFLAG_INTERNAL_IS_ANDROID() (1) +#else +#define BUILDFLAG_INTERNAL_IS_ANDROID() (0) +#endif + +#if defined(OS_APPLE) +#define BUILDFLAG_INTERNAL_IS_APPLE() (1) +#else +#define BUILDFLAG_INTERNAL_IS_APPLE() (0) +#endif + +#if defined(OS_ASMJS) +#define BUILDFLAG_INTERNAL_IS_ASMJS() (1) +#else +#define BUILDFLAG_INTERNAL_IS_ASMJS() (0) +#endif + +#if defined(OS_BSD) +#define BUILDFLAG_INTERNAL_IS_BSD() (1) +#else +#define BUILDFLAG_INTERNAL_IS_BSD() (0) +#endif + +#if defined(OS_CHROMEOS) +#define BUILDFLAG_INTERNAL_IS_CHROMEOS() (1) +#else +#define BUILDFLAG_INTERNAL_IS_CHROMEOS() (0) +#endif + +#if defined(OS_FREEBSD) +#define BUILDFLAG_INTERNAL_IS_FREEBSD() (1) +#else +#define BUILDFLAG_INTERNAL_IS_FREEBSD() (0) +#endif + +#if defined(OS_FUCHSIA) +#define BUILDFLAG_INTERNAL_IS_FUCHSIA() (1) +#else +#define BUILDFLAG_INTERNAL_IS_FUCHSIA() (0) +#endif + +#if defined(OS_IOS) +#define BUILDFLAG_INTERNAL_IS_IOS() (1) +#else +#define BUILDFLAG_INTERNAL_IS_IOS() (0) +#endif + +#if defined(OS_IOS_MACCATALYST) +#define BUILDFLAG_INTERNAL_IS_IOS_MACCATALYST() (1) +#else +#define BUILDFLAG_INTERNAL_IS_IOS_MACCATALYST() (0) +#endif + +#if defined(OS_LINUX) +#define BUILDFLAG_INTERNAL_IS_LINUX() (1) +#else +#define BUILDFLAG_INTERNAL_IS_LINUX() (0) +#endif + +#if defined(OS_MAC) +#define BUILDFLAG_INTERNAL_IS_MAC() (1) +#else +#define BUILDFLAG_INTERNAL_IS_MAC() (0) +#endif + +#if defined(OS_NACL) +#define BUILDFLAG_INTERNAL_IS_NACL() (1) +#else +#define BUILDFLAG_INTERNAL_IS_NACL() (0) +#endif + +#if defined(OS_NETBSD) +#define BUILDFLAG_INTERNAL_IS_NETBSD() (1) +#else +#define BUILDFLAG_INTERNAL_IS_NETBSD() (0) +#endif + +#if defined(OS_OPENBSD) +#define BUILDFLAG_INTERNAL_IS_OPENBSD() (1) +#else +#define BUILDFLAG_INTERNAL_IS_OPENBSD() (0) +#endif + +#if defined(OS_POSIX) +#define BUILDFLAG_INTERNAL_IS_POSIX() (1) +#else +#define BUILDFLAG_INTERNAL_IS_POSIX() (0) +#endif + +#if defined(OS_QNX) +#define BUILDFLAG_INTERNAL_IS_QNX() (1) +#else +#define BUILDFLAG_INTERNAL_IS_QNX() (0) +#endif + +#if defined(OS_SOLARIS) +#define BUILDFLAG_INTERNAL_IS_SOLARIS() (1) +#else +#define BUILDFLAG_INTERNAL_IS_SOLARIS() (0) +#endif + +#if defined(OS_WIN) +#define BUILDFLAG_INTERNAL_IS_WIN() (1) +#else +#define BUILDFLAG_INTERNAL_IS_WIN() (0) +#endif + +#if defined(USE_OZONE) +#define BUILDFLAG_INTERNAL_IS_OZONE() (1) +#else +#define BUILDFLAG_INTERNAL_IS_OZONE() (0) +#endif + // Compiler detection. Note: clang masquerades as GCC on POSIX and as MSVC on // Windows. 
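// (Editorial illustration, not part of the original header.) The
// BUILDFLAG_INTERNAL_IS_*() function-like macros above exist to feed
// build/buildflag.h, which roughly amounts to:
//
//   #define BUILDFLAG_CAT_INDIRECT(a, b) a##b
//   #define BUILDFLAG_CAT(a, b) BUILDFLAG_CAT_INDIRECT(a, b)
//   #define BUILDFLAG(flag) (BUILDFLAG_CAT(BUILDFLAG_INTERNAL_, flag)())
//
// so a use site written as
//
//   #if BUILDFLAG(IS_WIN)
//
// token-pastes to BUILDFLAG_INTERNAL_IS_WIN() and evaluates to (1) or (0).
// A typo such as BUILDFLAG(IS_WINN) fails to preprocess instead of silently
// evaluating to 0 the way a misspelled defined(OS_WIN) check would.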
#if defined(__GNUC__) @@ -225,6 +363,21 @@ #define ARCH_CPU_32_BITS 1 #define ARCH_CPU_BIG_ENDIAN 1 #endif +#elif defined(__loongarch32) +#define ARCH_CPU_LOONG_FAMILY 1 +#define ARCH_CPU_LOONG32 1 +#define ARCH_CPU_32_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__loongarch64) +#define ARCH_CPU_LOONG_FAMILY 1 +#define ARCH_CPU_LOONG64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__riscv) && (__riscv_xlen == 64) +#define ARCH_CPU_RISCV_FAMILY 1 +#define ARCH_CPU_RISCV64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 #else #error Please add support for your architecture in build/build_config.h #endif diff --git a/build/buildflag.h b/build/buildflag.h index 5776a754c42c..634697986cea 100644 --- a/build/buildflag.h +++ b/build/buildflag.h @@ -1,4 +1,4 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/buildflag_header.gni b/build/buildflag_header.gni index 821c4efe45f5..f7b42f724dd7 100644 --- a/build/buildflag_header.gni +++ b/build/buildflag_header.gni @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/check_gn_headers.py b/build/check_gn_headers.py index 9bdbba895f09..6bfb878a0a2e 100755 --- a/build/check_gn_headers.py +++ b/build/check_gn_headers.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2017 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,8 +9,6 @@ dependency generated by the compiler, and report if they don't exist in GN. """ -from __future__ import print_function - import argparse import json import os @@ -30,7 +28,10 @@ def GetHeadersFromNinja(out_dir, skip_obj, q): """Return all the header files from ninja_deps""" def NinjaSource(): - cmd = [os.path.join(DEPOT_TOOLS_DIR, 'ninja'), '-C', out_dir, '-t', 'deps'] + cmd = [ + os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja'), '-C', out_dir, + '-t', 'deps' + ] # A negative bufsize means to use the system default, which usually # means fully buffered. popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=-1) @@ -112,7 +113,7 @@ def ParseGNProjectJSON(gn, out_dir, tmp_out): """Parse GN output and get the header files""" all_headers = set() - for _target, properties in gn['targets'].iteritems(): + for _target, properties in gn['targets'].items(): sources = properties.get('sources', []) public = properties.get('public', []) # Exclude '"public": "*"'. @@ -294,7 +295,7 @@ def PrintError(msg): print(' ', cc) print('\nMissing headers sorted by number of affected object files:') - count = {k: len(v) for (k, v) in d.iteritems()} + count = {k: len(v) for (k, v) in d.items()} for f in sorted(count, key=count.get, reverse=True): if f in missing: print(count[f], f) diff --git a/build/check_gn_headers_unittest.py b/build/check_gn_headers_unittest.py index 20c3b1389790..954d95bfc959 100755 --- a/build/check_gn_headers_unittest.py +++ b/build/check_gn_headers_unittest.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2017 The Chromium Authors. All rights reserved. 
+#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -71,7 +71,7 @@ def testNinja(self): 'dir3/path/b.h': ['obj/c.o'], 'c3.hh': ['obj/c.o'], } - self.assertEquals(headers, expected) + self.assertEqual(headers, expected) def testGn(self): headers = check_gn_headers.ParseGNProjectJSON(gn_input, @@ -83,7 +83,7 @@ def testGn(self): 'base/p.h', 'out/Release/gen/a.h', ]) - self.assertEquals(headers, expected) + self.assertEqual(headers, expected) def testWhitelist(self): output = check_gn_headers.ParseWhiteList(whitelist) @@ -93,7 +93,7 @@ def testWhitelist(self): 'dir/white-both.c', 'a/b/c', ]) - self.assertEquals(output, expected) + self.assertEqual(output, expected) if __name__ == '__main__': diff --git a/build/check_gn_headers_whitelist.txt b/build/check_gn_headers_whitelist.txt index 2acf1b785a57..dfefd7d2458f 100644 --- a/build/check_gn_headers_whitelist.txt +++ b/build/check_gn_headers_whitelist.txt @@ -3,12 +3,11 @@ ash/accelerators/accelerator_table.h ash/ash_export.h +ash/constants/ash_switches.h ash/metrics/task_switch_metrics_recorder.h ash/metrics/task_switch_source.h -ash/metrics/user_metrics_action.h ash/metrics/user_metrics_recorder.h ash/public/cpp/ash_public_export.h -ash/public/cpp/ash_switches.h ash/public/cpp/shelf_types.h ash/session/session_observer.h ash/shell.h @@ -17,24 +16,20 @@ cc/cc_export.h cc/input/browser_controls_state.h cc/input/event_listener_properties.h cc/input/scrollbar.h -cc/input/scroller_size_metrics.h cc/layers/performance_properties.h chrome/browser/android/android_theme_resources.h chrome/browser/android/resource_id.h -chrome/browser/ash/certificate_provider/certificate_info.h -chrome/browser/ash/certificate_provider/certificate_provider.h -chrome/browser/ash/certificate_provider/certificate_provider_service.h -chrome/browser/ash/certificate_provider/certificate_provider_service_factory.h -chrome/browser/ash/certificate_provider/certificate_requests.h -chrome/browser/ash/certificate_provider/pin_dialog_manager.h -chrome/browser/ash/certificate_provider/sign_requests.h -chrome/browser/ash/certificate_provider/thread_safe_certificate_map.h chrome/browser/ash/login/signin/oauth2_login_manager.h -chrome/browser/ash/login/signin/oauth2_login_verifier.h chrome/browser/ash/login/signin/oauth2_token_fetcher.h -chrome/browser/ash/notifications/request_pin_view.h chrome/browser/ash/profiles/profile_helper.h chrome/browser/ash/settings/cros_settings.h +chrome/browser/certificate_provider/certificate_provider.h +chrome/browser/certificate_provider/certificate_provider_service.h +chrome/browser/certificate_provider/certificate_provider_service_factory.h +chrome/browser/certificate_provider/certificate_requests.h +chrome/browser/certificate_provider/pin_dialog_manager.h +chrome/browser/certificate_provider/sign_requests.h +chrome/browser/certificate_provider/thread_safe_certificate_map.h chrome/browser/component_updater/component_installer_errors.h chrome/browser/download/download_file_icon_extractor.h chrome/browser/extensions/api/networking_cast_private/chrome_networking_cast_private_delegate.h @@ -46,7 +41,7 @@ chrome/browser/media/webrtc/rtp_dump_type.h chrome/browser/media_galleries/media_file_system_context.h chrome/browser/notifications/displayed_notifications_dispatch_callback.h chrome/browser/ui/app_icon_loader_delegate.h -chrome/browser/ui/app_list/app_list_syncable_service_factory.h 
+chrome/browser/ash/app_list/app_list_syncable_service_factory.h chrome/browser/ui/ash/ash_util.h chrome/browser/ui/ash/multi_user/multi_user_util.h chrome/browser/ui/network_profile_bubble.h @@ -58,17 +53,12 @@ chrome/install_static/install_modes.h chrome/install_static/install_util.h chrome/install_static/test/scoped_install_details.h chrome/installer/util/google_update_settings.h -components/browser_watcher/features.h -components/browser_watcher/stability_paths.h -components/cast_certificate/cast_crl_root_ca_cert_der-inc.h components/cdm/browser/cdm_message_filter_android.h components/device_event_log/device_event_log_export.h components/login/login_export.h +components/media_router/common/providers/cast/certificate/cast_crl_root_ca_cert_der-inc.h components/nacl/browser/nacl_browser_delegate.h components/nacl/renderer/ppb_nacl_private.h -components/omnibox/browser/autocomplete_i18n.h -components/omnibox/browser/autocomplete_provider_client.h -components/omnibox/browser/autocomplete_provider_listener.h components/policy/core/browser/configuration_policy_handler_parameters.h components/policy/proto/policy_proto_export.h components/rlz/rlz_tracker_delegate.h @@ -117,7 +107,6 @@ gpu/command_buffer/service/memory_tracking.h gpu/config/gpu_lists_version.h gpu/gles2_conform_support/gtf/gtf_stubs.h gpu/gpu_export.h -headless/lib/headless_macros.h ipc/ipc_channel_proxy_unittest_messages.h ipc/ipc_message_null_macros.h media/audio/audio_logging.h @@ -220,27 +209,6 @@ third_party/qcms/src/tests/timing.h third_party/snappy/linux/config.h third_party/speech-dispatcher/libspeechd.h third_party/sqlite/sqlite3.h -third_party/tcmalloc/chromium/src/addressmap-inl.h -third_party/tcmalloc/chromium/src/base/basictypes.h -third_party/tcmalloc/chromium/src/base/dynamic_annotations.h -third_party/tcmalloc/chromium/src/base/googleinit.h -third_party/tcmalloc/chromium/src/base/linux_syscall_support.h -third_party/tcmalloc/chromium/src/base/spinlock_linux-inl.h -third_party/tcmalloc/chromium/src/base/stl_allocator.h -third_party/tcmalloc/chromium/src/base/thread_annotations.h -third_party/tcmalloc/chromium/src/base/thread_lister.h -third_party/tcmalloc/chromium/src/gperftools/malloc_extension_c.h -third_party/tcmalloc/chromium/src/gperftools/malloc_hook_c.h -third_party/tcmalloc/chromium/src/gperftools/tcmalloc.h -third_party/tcmalloc/chromium/src/heap-profile-stats.h -third_party/tcmalloc/chromium/src/libc_override.h -third_party/tcmalloc/chromium/src/malloc_hook_mmap_linux.h -third_party/tcmalloc/chromium/src/packed-cache-inl.h -third_party/tcmalloc/chromium/src/page_heap_allocator.h -third_party/tcmalloc/chromium/src/pagemap.h -third_party/tcmalloc/chromium/src/stacktrace_x86-inl.h -third_party/tcmalloc/chromium/src/system-alloc.h -third_party/tcmalloc/chromium/src/tcmalloc_guard.h third_party/wayland/include/config.h third_party/wayland/include/src/wayland-version.h third_party/woff2/src/port.h diff --git a/build/check_return_value.py b/build/check_return_value.py index 9caa15f112c1..2337e962da1e 100755 --- a/build/check_return_value.py +++ b/build/check_return_value.py @@ -1,12 +1,11 @@ -#!/usr/bin/env python -# Copyright 2014 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
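# (Editorial aside, not part of the patch.) The check_gn_headers.py hunks
# above show the Python 3 migration pattern this patch applies throughout
# //build: drop `from __future__ import print_function`, use assertEqual in
# tests, and replace the removed dict.iteritems() with dict.items(), e.g.:
#
#   for _target, properties in gn['targets'].items():
#       sources = properties.get('sources', [])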
"""This program wraps an arbitrary command and prints "1" if the command ran successfully.""" -from __future__ import print_function import os import subprocess diff --git a/build/chromeos/.style.yapf b/build/chromeos/.style.yapf index de0c6a70f38b..fdd07237cbe3 100644 --- a/build/chromeos/.style.yapf +++ b/build/chromeos/.style.yapf @@ -1,2 +1,2 @@ [style] -based_on_style = chromium +based_on_style = yapf diff --git a/build/chromeos/PRESUBMIT.py b/build/chromeos/PRESUBMIT.py index 312faf03ef53..b9734e6aa5fb 100644 --- a/build/chromeos/PRESUBMIT.py +++ b/build/chromeos/PRESUBMIT.py @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Presubmit script for build/chromeos/. @@ -8,13 +8,25 @@ """ +USE_PYTHON3 = True + + def CommonChecks(input_api, output_api): results = [] - results += input_api.canned_checks.RunPylint( - input_api, output_api, pylintrc='pylintrc') - tests = input_api.canned_checks.GetUnitTestsInDirectory( - input_api, output_api, '.', [r'^.+_test\.py$'], run_on_python3=True) - results += input_api.RunTests(tests) + # These tests don't run on Windows and give verbose and cryptic failure + # messages. Linting the code on a platform where it will not run is also not + # valuable and gives spurious errors. + if input_api.sys.platform != 'win32': + results += input_api.canned_checks.RunPylint( + input_api, output_api, pylintrc='pylintrc', version='2.6') + tests = input_api.canned_checks.GetUnitTestsInDirectory( + input_api, + output_api, + '.', [r'^.+_test\.py$'], + run_on_python2=False, + run_on_python3=True, + skip_shebang_check=True) + results += input_api.RunTests(tests) return results diff --git a/build/chromeos/generate_skylab_deps.py b/build/chromeos/generate_skylab_deps.py new file mode 100755 index 000000000000..a929245ecf70 --- /dev/null +++ b/build/chromeos/generate_skylab_deps.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python3 +# +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import json +import os +import re +import sys + +# The basic shell script for client test run in Skylab. The arguments listed +# here will be fed by autotest at the run time. +# +# * test-launcher-summary-output: the path for the json result. It will be +# assigned by autotest, who will upload it to GCS upon test completion. +# * test-launcher-shard-index: the index for this test run. +# * test-launcher-total-shards: the total test shards. +# * test_args: arbitrary runtime arguments configured in test_suites.pyl, +# attached after '--'. +BASIC_SHELL_SCRIPT = """ +#!/bin/sh + +while [[ $# -gt 0 ]]; do + case "$1" in + --test-launcher-summary-output) + summary_output=$2 + shift 2 + ;; + + --test-launcher-shard-index) + shard_index=$2 + shift 2 + ;; + + --test-launcher-total-shards) + total_shards=$2 + shift 2 + ;; + + --) + test_args=$2 + break + ;; + + *) + break + ;; + esac +done + +if [ ! -d $(dirname $summary_output) ] ; then + mkdir -p $(dirname $summary_output) +fi + +cd `dirname $0` && cd .. +""" + + +def build_test_script(args): + # Build the shell script that will be used on the device to invoke the test. + # Stored here as a list of lines. 
+ device_test_script_contents = BASIC_SHELL_SCRIPT.split('\n') + + test_invocation = ('LD_LIBRARY_PATH=./ ./%s ' + ' --test-launcher-summary-output=$summary_output' + ' --test-launcher-shard-index=$shard_index' + ' --test-launcher-total-shards=$total_shards' + ' $test_args' % args.test_exe) + + device_test_script_contents.append(test_invocation) + with open(args.output, 'w') as w: + w.write('\n'.join(device_test_script_contents)) + os.chmod(args.output, 0o755) + + +def build_filter_file(args): + # TODO(b/227381644): This expression is hard to follow and should be + # simplified. This would require a change on the cros infra side as well + tast_expr_dict = {} + default_disabled_tests = [] + if args.disabled_tests is not None: + default_disabled_tests = [ + '!"name:{0}"'.format(test) for test in args.disabled_tests + ] + + default_enabled_test_term = '' + if args.enabled_tests is not None: + default_enabled_test_term = (' || ').join( + ['"name:{0}"'.format(test) for test in args.enabled_tests]) + + # Generate the default expression to be used when there is no known key + tast_expr = args.tast_expr if args.tast_expr else "" + + if default_disabled_tests: + default_disabled_term = " && ".join(default_disabled_tests) + tast_expr = "{0} && {1}".format(tast_expr, default_disabled_term) if \ + tast_expr else default_disabled_term + + if default_enabled_test_term: + tast_expr = "{0} && ({1})".format( + tast_expr, + default_enabled_test_term) if tast_expr else default_enabled_test_term + + tast_expr_dict['default'] = "({0})".format(tast_expr) + + # Generate an expression for each collection in the gni file + if args.tast_control is not None: + with open(args.tast_control, 'r') as tast_control_file: + gni = tast_control_file.read() + filter_lists = re.findall(r'(.*) = \[([^\]]*)\]', gni) + for filter_list in filter_lists: + tast_expr = args.tast_expr if args.tast_expr else "" + + milestone_disabled_tests = { + '!"name:{0}"'.format(test) + for test in re.findall(r'"([^"]+)"', filter_list[1]) + } + + milestone_disabled_tests.update(default_disabled_tests) + + if milestone_disabled_tests: + tast_expr = "{0} && {1}".format( + tast_expr, " && ".join(milestone_disabled_tests) + ) if tast_expr else " && ".join(milestone_disabled_tests) + + if default_enabled_test_term: + tast_expr = "{0} && ({1})".format( + tast_expr, default_enabled_test_term + ) if tast_expr else default_enabled_test_term + + if tast_expr: + tast_expr_dict[filter_list[0]] = "({0})".format(tast_expr) + + if len(tast_expr_dict) > 0: + with open(args.output, "w") as file: + json.dump(tast_expr_dict, file, indent=2) + os.chmod(args.output, 0o644) + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(dest='command') + + script_gen_parser = subparsers.add_parser('generate-runner') + script_gen_parser.add_argument( + '--test-exe', + type=str, + required=True, + help='Path to test executable to run inside the device.') + script_gen_parser.add_argument('--verbose', '-v', action='store_true') + script_gen_parser.add_argument( + '--output', + required=True, + type=str, + help='Path to create the runner script.') + script_gen_parser.set_defaults(func=build_test_script) + + filter_gen_parser = subparsers.add_parser('generate-filter') + filter_gen_parser.add_argument( + '--tast-expr', + type=str, + required=False, + help='Tast expression to determine tests to run. 
This creates the ' + 'initial set of tests that can be further filtered.') + filter_gen_parser.add_argument( + '--enabled-tests', + type=str, + required=False, + action='append', + help='Name of tests to allow to test (unnamed tests will not run).') + filter_gen_parser.add_argument( + '--disabled-tests', + type=str, + required=False, + action='append', + help='Names of tests to disable from running') + filter_gen_parser.add_argument( + '--tast-control', + type=str, + required=False, + help='Filename for the tast_control file containing version skew ' + 'test filters to generate.') + filter_gen_parser.add_argument( + '--output', + required=True, + type=str, + help='Path to create the plain text filter file.') + filter_gen_parser.set_defaults(func=build_filter_file) + + args = parser.parse_args() + + if (args.command == "generate-filter" and args.disabled_tests is None and + args.enabled_tests is None and args.tast_expr is None): + parser.error( + '--disabled-tests, --enabled-tests, or --tast-expr must be provided ' + 'to generate-filter') + + args.func(args) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/chromeos/generate_skylab_deps_test.py b/build/chromeos/generate_skylab_deps_test.py new file mode 100755 index 000000000000..9a30825f0db1 --- /dev/null +++ b/build/chromeos/generate_skylab_deps_test.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python3 +# +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import unittest +from unittest import mock + +import generate_skylab_deps + +TAST_CONTROL = ''' +# Ignore comments +tast_disabled_tests_from_chrome_all = [ + "example.all.test1", +] +tast_disabled_tests_from_chrome_m100 = [ + "example.m100.test1", +] +tast_disabled_tests_from_lacros_all = [] +''' + +TAST_EXPR = '"group:mainline" && "dep:chrome" && !informational' + +REQUIRED_ARGS = ['script', 'generate-filter', '--output', 'output.filter'] + + +class GenerateSkylabDepsTest(unittest.TestCase): + + def testTastExpr(self): + file_mock = mock.mock_open(read_data=TAST_CONTROL) + args = REQUIRED_ARGS + ['--tast-expr', TAST_EXPR] + + with mock.patch('sys.argv', args),\ + mock.patch('builtins.open', file_mock),\ + mock.patch('os.chmod'),\ + mock.patch("json.dump", mock.MagicMock()) as dump: + generate_skylab_deps.main() + filter_dict = dump.call_args[0][0] + self.assertEqual(filter_dict['default'], '(%s)' % TAST_EXPR) + + def testTastExprAndDisableTests(self): + file_mock = mock.mock_open(read_data=TAST_CONTROL) + args = REQUIRED_ARGS + [ + '--tast-expr', TAST_EXPR, '--disabled-tests', 'disabled.test1', + '--disabled-tests', 'disabled.test2' + ] + + with mock.patch('sys.argv', args),\ + mock.patch('builtins.open', file_mock),\ + mock.patch('os.chmod'),\ + mock.patch("json.dump", mock.MagicMock()) as dump: + generate_skylab_deps.main() + filter_dict = dump.call_args[0][0] + self.assertEqual( + filter_dict['default'], + '(%s && !"name:disabled.test1" && !"name:disabled.test2")' % + TAST_EXPR) + + def testEnableTests(self): + file_mock = mock.mock_open(read_data=TAST_CONTROL) + args = REQUIRED_ARGS + [ + '--enabled-tests', 'enabled.test1', '--enabled-tests', 'enabled.test2' + ] + + with mock.patch('sys.argv', args),\ + mock.patch('builtins.open', file_mock),\ + mock.patch('os.chmod'),\ + mock.patch("json.dump", mock.MagicMock()) as dump: + generate_skylab_deps.main() + filter_dict = dump.call_args[0][0] + self.assertEqual(filter_dict['default'], + '("name:enabled.test1" || 
"name:enabled.test2")') + + def testTastControlWithTastExpr(self): + file_mock = mock.mock_open(read_data=TAST_CONTROL) + args = REQUIRED_ARGS + [ + '--tast-expr', + TAST_EXPR, + '--tast-control', + 'mocked_input', + ] + + with mock.patch('sys.argv', args),\ + mock.patch('builtins.open', file_mock),\ + mock.patch('os.chmod'),\ + mock.patch("json.dump", mock.MagicMock()) as dump: + generate_skylab_deps.main() + filter_dict = dump.call_args[0][0] + self.assertEqual(filter_dict['default'], '(%s)' % TAST_EXPR) + self.assertEqual(filter_dict['tast_disabled_tests_from_chrome_m100'], + '(%s && !"name:example.m100.test1")' % TAST_EXPR) + + def testTastControlWithTastExprAndDisabledTests(self): + file_mock = mock.mock_open(read_data=TAST_CONTROL) + args = REQUIRED_ARGS + [ + '--tast-expr', TAST_EXPR, '--tast-control', 'mocked_input', + '--disabled-tests', 'disabled.test1', '--disabled-tests', + 'disabled.test2' + ] + + with mock.patch('sys.argv', args),\ + mock.patch('builtins.open', file_mock),\ + mock.patch('os.chmod'),\ + mock.patch("json.dump", mock.MagicMock()) as dump: + generate_skylab_deps.main() + filter_dict = dump.call_args[0][0] + self.assertEqual( + filter_dict['default'], + '("group:mainline" && "dep:chrome" && !informational && !'\ + '"name:disabled.test1" && !"name:disabled.test2")' + ) + + # The list from a set is indeterminent + self.assertIn('"group:mainline" && "dep:chrome" && !informational', + filter_dict['tast_disabled_tests_from_chrome_m100']) + self.assertIn('&& !"name:disabled.test1"', + filter_dict['tast_disabled_tests_from_chrome_m100']) + self.assertIn('&& !"name:disabled.test2"', + filter_dict['tast_disabled_tests_from_chrome_m100']) + self.assertIn('&& !"name:example.m100.test1"', + filter_dict['tast_disabled_tests_from_chrome_m100']) + + def testTastControlWithTastExprAndEnabledTests(self): + file_mock = mock.mock_open(read_data=TAST_CONTROL) + args = REQUIRED_ARGS + [ + '--tast-expr', TAST_EXPR, '--tast-control', 'mocked_input', + '--enabled-tests', 'enabled.test1', '--enabled-tests', 'enabled.test2' + ] + + with mock.patch('sys.argv', args),\ + mock.patch('builtins.open', file_mock),\ + mock.patch('os.chmod'),\ + mock.patch("json.dump", mock.MagicMock()) as dump: + generate_skylab_deps.main() + filter_dict = dump.call_args[0][0] + self.assertEqual( + filter_dict['default'], + '("group:mainline" && "dep:chrome" && !informational && '\ + '("name:enabled.test1" || "name:enabled.test2"))' + ) + self.assertEqual( + filter_dict['tast_disabled_tests_from_chrome_m100'], + '("group:mainline" && "dep:chrome" && !informational && '\ + '!"name:example.m100.test1" && ("name:enabled.test1" '\ + '|| "name:enabled.test2"))' + ) + + def testTastControlWithEnabledTests(self): + file_mock = mock.mock_open(read_data=TAST_CONTROL) + args = REQUIRED_ARGS + [ + '--tast-control', + 'mocked_input', + '--enabled-tests', + 'enabled.test1', + '--enabled-tests', + 'enabled.test2', + ] + + with mock.patch('sys.argv', args),\ + mock.patch('builtins.open', file_mock),\ + mock.patch('os.chmod'),\ + mock.patch("json.dump", mock.MagicMock()) as dump: + generate_skylab_deps.main() + filter_dict = dump.call_args[0][0] + # Should not include 'all' collection from TAST_CONTROL since that would + # need to be passed in the --disabled-tests to be included + self.assertEqual(filter_dict['default'], + '("name:enabled.test1" || "name:enabled.test2")') + self.assertEqual( + filter_dict['tast_disabled_tests_from_chrome_m100'], + '(!"name:example.m100.test1" && '\ + '("name:enabled.test1" || 
"name:enabled.test2"))' + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/chromeos/test_runner.py b/build/chromeos/test_runner.py index 36b024b105a1..14c31e1c4a55 100755 --- a/build/chromeos/test_runner.py +++ b/build/chromeos/test_runner.py @@ -1,6 +1,6 @@ #!/usr/bin/env vpython3 # -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,20 +9,19 @@ import json import logging import os -import pipes import re import shutil import signal import socket import sys import tempfile +import six # The following non-std imports are fetched via vpython. See the list at # //.vpython import dateutil.parser # pylint: disable=import-error import jsonlines # pylint: disable=import-error import psutil # pylint: disable=import-error -import six CHROMIUM_SRC_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..')) @@ -31,13 +30,15 @@ # output json ourselves. sys.path.insert(0, os.path.join(CHROMIUM_SRC_PATH, 'build', 'android')) from pylib.base import base_test_result # pylint: disable=import-error -from pylib.base import result_sink # pylint: disable=import-error from pylib.results import json_results # pylint: disable=import-error -if six.PY2: - import subprocess32 as subprocess # pylint: disable=import-error -else: - import subprocess # pylint: disable=import-error,wrong-import-order +sys.path.insert(0, os.path.join(CHROMIUM_SRC_PATH, 'build', 'util')) +# TODO(crbug.com/1421441): Re-enable the 'no-name-in-module' check. +from lib.results import result_sink # pylint: disable=import-error,no-name-in-module + +assert not six.PY2, 'Py2 not supported for this file.' + +import subprocess # pylint: disable=import-error,wrong-import-order DEFAULT_CROS_CACHE = os.path.abspath( os.path.join(CHROMIUM_SRC_PATH, 'build', 'cros_cache')) @@ -68,7 +69,7 @@ class TestFormatError(Exception): pass -class RemoteTest(object): +class RemoteTest: # This is a basic shell script that can be appended to in order to invoke the # test on the device. @@ -131,20 +132,6 @@ def __init__(self, args, unknown_args): if args.public_image: self._test_cmd += ['--public-image'] - # This environment variable is set for tests that have been instrumented - # for code coverage. Its incoming value is expected to be a location - # inside a subdirectory of result_dir above. This is converted to an - # absolute path that the vm is able to write to, and passed in the - # --results-src flag to cros_run_vm_test for copying out of the vm before - # its termination. - self._llvm_profile_var = None - if os.environ.get('LLVM_PROFILE_FILE'): - _, llvm_profile_file = os.path.split(os.environ['LLVM_PROFILE_FILE']) - self._llvm_profile_var = '/tmp/profraw/%s' % llvm_profile_file - - # This should make the vm test runner exfil the profiling data. 
- self._test_cmd += ['--results-src', '/tmp/profraw'] - self._test_env = setup_env() @property @@ -164,7 +151,7 @@ def write_test_script_to_disk(self, script_contents): os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH), ] logging.info('Running the following command on the device:') - logging.info('\n' + '\n'.join(script_contents)) + logging.info('\n%s', '\n'.join(script_contents)) fd, tmp_path = tempfile.mkstemp(suffix='.sh', dir=self._path_to_outdir) os.fchmod(fd, 0o755) with os.fdopen(fd, 'w') as f: @@ -217,28 +204,14 @@ def _kill_child_procs(trapped_signal, _): if test_proc.returncode == 0: break - ret = self.post_run(test_proc.returncode) + self.post_run(test_proc.returncode) # Allow post_run to override test proc return code. (Useful when the host # side Tast bin returns 0 even for failed tests.) - if ret is not None: - return ret return test_proc.returncode - def post_run(self, return_code): + def post_run(self, _): if self._on_device_script: os.remove(self._on_device_script) - # Create a simple json results file for a test run. The results will contain - # only one test (suite_name), and will either be a PASS or FAIL depending on - # return_code. - if self._test_launcher_summary_output: - result = ( - base_test_result.ResultType.FAIL - if return_code else base_test_result.ResultType.PASS) - suite_result = base_test_result.BaseTestResult(self.suite_name, result) - run_results = base_test_result.TestRunResults() - run_results.AddResult(suite_result) - with open(self._test_launcher_summary_output, 'w') as f: - json.dump(json_results.GenerateResultsDict([run_results]), f) @staticmethod def get_artifacts(path): @@ -253,7 +226,15 @@ def get_artifacts(path): for dirpath, _, filenames in os.walk(path): for f in filenames: artifact_path = os.path.join(dirpath, f) - artifacts[os.path.relpath(artifact_path, path)] = { + artifact_id = os.path.relpath(artifact_path, path) + # Some artifacts will have non-Latin characters in the filename, eg: + # 'ui_tree_Chinese Pinyin-你好.txt'. ResultDB's API rejects such + # characters as an artifact ID, so force the file name down into ascii. + # For more info, see: + # https://source.chromium.org/chromium/infra/infra/+/main:go/src/go.chromium.org/luci/resultdb/proto/v1/artifact.proto;drc=3bff13b8037ca76ec19f9810033d914af7ec67cb;l=46 + artifact_id = artifact_id.encode('ascii', 'replace').decode() + artifact_id = artifact_id.replace('\\', '?') + artifacts[artifact_id] = { 'filePath': artifact_path, } return artifacts @@ -262,10 +243,11 @@ def get_artifacts(path): class TastTest(RemoteTest): def __init__(self, args, unknown_args): - super(TastTest, self).__init__(args, unknown_args) + super().__init__(args, unknown_args) self._suite_name = args.suite_name self._tast_vars = args.tast_vars + self._tast_retries = args.tast_retries self._tests = args.tests # The CQ passes in '--gtest_filter' when specifying tests to skip. Store it # here and parse it later to integrate it into Tast executions. 
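The artifact-ID sanitization in get_artifacts() above is easy to exercise in isolation; a short sketch of the same transformation (the input filename is the example from the code comment, and the ASCII-only constraint comes from ResultDB's artifact.proto):

    artifact_id = 'ui_tree_Chinese Pinyin-你好.txt'
    # encode(..., 'replace') turns every non-ASCII character into '?'.
    artifact_id = artifact_id.encode('ascii', 'replace').decode()
    # Backslashes are likewise rejected by ResultDB, so map them to '?' too.
    artifact_id = artifact_id.replace('\\', '?')
    print(artifact_id)  # -> ui_tree_Chinese Pinyin-??.txt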
@@ -273,14 +255,9 @@ def __init__(self, args, unknown_args): self._attr_expr = args.attr_expr self._should_strip = args.strip_chrome self._deploy_lacros = args.deploy_lacros + self._deploy_chrome = args.deploy_chrome - if self._deploy_lacros and self._should_strip: - raise TestFormatError( - '--strip-chrome is only applicable to ash-chrome because ' - 'lacros-chrome deployment uses --nostrip by default, so it cannot ' - 'be specificed with --deploy-lacros.') - - if not self._llvm_profile_var and not self._logs_dir: + if not self._logs_dir: # The host-side Tast bin returns 0 when tests fail, so we need to capture # and parse its json results to reliably determine if tests fail. raise TestFormatError( @@ -313,101 +290,72 @@ def build_test_command(self): ] # Lacros deployment mounts itself by default. - self._test_cmd.extend([ - '--deploy-lacros', '--lacros-launcher-script', - LACROS_LAUNCHER_SCRIPT_PATH - ] if self._deploy_lacros else ['--deploy', '--mount']) + if self._deploy_lacros: + self._test_cmd.extend([ + '--deploy-lacros', '--lacros-launcher-script', + LACROS_LAUNCHER_SCRIPT_PATH + ]) + if self._deploy_chrome: + self._test_cmd.extend(['--deploy', '--mount']) + else: + self._test_cmd.extend(['--deploy', '--mount']) self._test_cmd += [ '--build-dir', os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH) ] + self._additional_args - # Coverage tests require some special pre-test setup, so use an - # on_device_script in that case. For all other tests, use cros_run_test's - # built-in '--tast' option. This gives us much better results reporting. - if self._llvm_profile_var: - # Build the shell script that will be used on the device to invoke the - # test. - device_test_script_contents = self.BASIC_SHELL_SCRIPT[:] - device_test_script_contents += [ - 'echo "LLVM_PROFILE_FILE=%s" >> /etc/chrome_dev.conf' % - (self._llvm_profile_var) - ] - - local_test_runner_cmd = ['local_test_runner', '-waituntilready'] - if self._use_vm: - # If we're running tests in VMs, tell the test runner to skip tests that - # aren't compatible. - local_test_runner_cmd.append('-extrauseflags=tast_vm') - if self._attr_expr: - local_test_runner_cmd.append(pipes.quote(self._attr_expr)) - else: - local_test_runner_cmd.extend(self._tests) - device_test_script_contents.append(' '.join(local_test_runner_cmd)) - - self._on_device_script = self.write_test_script_to_disk( - device_test_script_contents) - + # Capture tast's results in the logs dir as well. + if self._logs_dir: self._test_cmd += [ - '--files', - os.path.relpath(self._on_device_script), '--', - './' + os.path.relpath(self._on_device_script, self._path_to_outdir) + '--results-dir', + self._logs_dir, ] + self._test_cmd += [ + '--tast-total-shards=%d' % self._test_launcher_total_shards, + '--tast-shard-index=%d' % self._test_launcher_shard_index, + ] + # If we're using a test filter, replace the contents of the Tast + # conditional with a long list of "name:test" expressions, one for each + # test in the filter. + if self._gtest_style_filter: + if self._attr_expr or self._tests: + logging.warning( + 'Presence of --gtest_filter will cause the specified Tast expr' + ' or test list to be ignored.') + names = [] + for test in self._gtest_style_filter.split(':'): + names.append('"name:%s"' % test) + self._attr_expr = '(' + ' || '.join(names) + ')' + + if self._attr_expr: + # Don't use pipes.quote() here. Something funky happens with the arg + # as it gets passed down from cros_run_test to tast. 
(Tast picks up the + # escaping single quotes and complains that the attribute expression + # "must be within parentheses".) + self._test_cmd.append('--tast=%s' % self._attr_expr) else: - # Capture tast's results in the logs dir as well. - if self._logs_dir: - self._test_cmd += [ - '--results-dir', - self._logs_dir, - ] - self._test_cmd += [ - '--tast-total-shards=%d' % self._test_launcher_total_shards, - '--tast-shard-index=%d' % self._test_launcher_shard_index, - ] - # If we're using a test filter, replace the contents of the Tast - # conditional with a long list of "name:test" expressions, one for each - # test in the filter. - if self._gtest_style_filter: - if self._attr_expr or self._tests: - logging.warning( - 'Presence of --gtest_filter will cause the specified Tast expr' - ' or test list to be ignored.') - names = [] - for test in self._gtest_style_filter.split(':'): - names.append('"name:%s"' % test) - self._attr_expr = '(' + ' || '.join(names) + ')' - - if self._attr_expr: - # Don't use pipes.quote() here. Something funky happens with the arg - # as it gets passed down from cros_run_test to tast. (Tast picks up the - # escaping single quotes and complains that the attribute expression - # "must be within parentheses".) - self._test_cmd.append('--tast=%s' % self._attr_expr) - else: - self._test_cmd.append('--tast') - self._test_cmd.extend(self._tests) + self._test_cmd.append('--tast') + self._test_cmd.extend(self._tests) - for v in self._tast_vars or []: - self._test_cmd.extend(['--tast-var', v]) + for v in self._tast_vars or []: + self._test_cmd.extend(['--tast-var', v]) - # Mounting ash-chrome gives it enough disk space to not need stripping, - # but only for one not instrumented with code coverage. - # Lacros uses --nostrip by default, so there is no need to specify. - if not self._deploy_lacros and not self._should_strip: - self._test_cmd.append('--nostrip') + if self._tast_retries: + self._test_cmd.append('--tast-retries=%d' % self._tast_retries) - def post_run(self, return_code): - # If we don't need to parse the host-side Tast tool's results, fall back to - # the parent method's default behavior. - if self._llvm_profile_var: - return super(TastTest, self).post_run(return_code) + # Mounting ash-chrome gives it enough disk space to not need stripping, + # but only for one not instrumented with code coverage. + # Lacros uses --nostrip by default, so there is no need to specify. + if not self._deploy_lacros and not self._should_strip: + self._test_cmd.append('--nostrip') + def post_run(self, return_code): tast_results_path = os.path.join(self._logs_dir, 'streamed_results.jsonl') if not os.path.exists(tast_results_path): logging.error( 'Tast results not found at %s. 
Falling back to generic result '
          'reporting.', tast_results_path)
-      return super(TastTest, self).post_run(return_code)
+      return super().post_run(return_code)
     # See the link below for the format of the results:
     # https://godoc.org/chromium.googlesource.com/chromiumos/platform/tast.git/src/chromiumos/cmd/tast/run#TestResult
@@ -431,15 +379,16 @@ def post_run(self, return_code):
         result = base_test_result.ResultType.FAIL
       else:
         result = base_test_result.ResultType.PASS
+      primary_error_message = None
       error_log = ''
       if errors:
         # See the link below for the format of these errors:
-        # https://godoc.org/chromium.googlesource.com/chromiumos/platform/tast.git/src/chromiumos/tast/testing#Error
+        # https://source.chromium.org/chromiumos/chromiumos/codesearch/+/main:src/platform/tast/src/chromiumos/tast/cmd/tast/internal/run/resultsjson/resultsjson.go
+        primary_error_message = errors[0]['reason']
         for err in errors:
           error_log += err['stack'] + '\n'
-      error_log += (
-          "\nIf you're unsure why this test failed, consult the steps "
-          'outlined in\n%s\n' % TAST_DEBUG_DOC)
+      debug_link = ("If you're unsure why this test failed, consult the steps "
+                    'outlined <a href="%s">here</a>.' % TAST_DEBUG_DOC)
       base_result = base_test_result.BaseTestResult(
           test['name'], result, duration=duration_ms, log=error_log)
       suite_results.AddResult(base_result)
@@ -450,8 +399,15 @@ def post_run(self, return_code):
         # inside as an RDB 'artifact'. (This could include system logs, screen
         # shots, etc.)
         artifacts = self.get_artifacts(test['outDir'])
-        self._rdb_client.Post(test['name'], result, duration_ms, error_log,
-                              artifacts)
+        self._rdb_client.Post(
+            test['name'],
+            result,
+            duration_ms,
+            error_log,
+            None,
+            artifacts=artifacts,
+            failure_reason=primary_error_message,
+            html_artifact=debug_link)
 
     if self._rdb_client and self._logs_dir:
       # Attach artifacts from the device that don't apply to a single test.
@@ -467,7 +423,7 @@ def post_run(self, return_code):
     if not suite_results.DidRunPass():
       return 1
-    elif return_code:
+    if return_code:
       logging.warning(
           'No failed tests found, but exit code of %d was returned from '
           'cros_run_test.', return_code)
@@ -523,7 +479,7 @@ class GTestTest(RemoteTest):
   ]
 
   def __init__(self, args, unknown_args):
-    super(GTestTest, self).__init__(args, unknown_args)
+    super().__init__(args, unknown_args)
 
     self._test_exe = args.test_exe
     self._runtime_deps_path = args.runtime_deps_path
@@ -583,31 +539,26 @@ def build_test_command(self):
     # Build the shell script that will be used on the device to invoke the test.
     # Stored here as a list of lines.
device_test_script_contents = self.BASIC_SHELL_SCRIPT[:] - if self._llvm_profile_var: - device_test_script_contents += [ - 'export LLVM_PROFILE_FILE=%s' % self._llvm_profile_var, - ] - for var_name, var_val in self._env_vars: device_test_script_contents += ['export %s=%s' % (var_name, var_val)] if self._vpython_dir: vpython_path = os.path.join(self._path_to_outdir, self._vpython_dir, - 'vpython') + 'vpython3') cpython_path = os.path.join(self._path_to_outdir, self._vpython_dir, - 'bin', 'python') + 'bin', 'python3') if not os.path.exists(vpython_path) or not os.path.exists(cpython_path): raise TestFormatError( - '--vpython-dir must point to a dir with both infra/python/cpython ' - 'and infra/tools/luci/vpython installed.') + '--vpython-dir must point to a dir with both ' + 'infra/3pp/tools/cpython3 and infra/tools/luci/vpython installed.') vpython_spec_path = os.path.relpath( - os.path.join(CHROMIUM_SRC_PATH, '.vpython'), self._path_to_outdir) + os.path.join(CHROMIUM_SRC_PATH, '.vpython3'), self._path_to_outdir) # Initialize the vpython cache. This can take 10-20s, and some tests # can't afford to wait that long on the first invocation. device_test_script_contents.extend([ 'export PATH=$PWD/%s:$PWD/%s/bin/:$PATH' % (self._vpython_dir, self._vpython_dir), - 'vpython -vpython-spec %s -vpython-tool install' % + 'vpython3 -vpython-spec %s -vpython-tool install' % (vpython_spec_path), ]) @@ -622,7 +573,7 @@ def build_test_command(self): if self._trace_dir: device_test_script_contents.extend([ 'rm -rf %s' % device_trace_dir, - 'su chronos -c -- "mkdir -p %s"' % device_trace_dir, + 'sudo -E -u chronos -- /bin/bash -c "mkdir -p %s"' % device_trace_dir, ]) test_invocation += ' --trace-dir=%s' % device_trace_dir @@ -636,7 +587,8 @@ def build_test_command(self): # The UI service on the device owns the chronos user session, so shutting # it down as chronos kills the entire execution of the test. So we'll have # to run as root up until the test invocation. - test_invocation = 'su chronos -c -- "%s"' % test_invocation + test_invocation = ( + 'sudo -E -u chronos -- /bin/bash -c "%s"' % test_invocation) # And we'll need to chown everything since cros_run_test's "--as-chronos" # option normally does that for us. device_test_script_contents.append('chown -R chronos: ../..') @@ -662,9 +614,6 @@ def build_test_command(self): os.path.abspath( os.path.join(self._path_to_outdir, self._vpython_dir)), CHROMIUM_SRC_PATH)) - # TODO(bpastene): Add the vpython spec to the test's runtime deps instead - # of handling it here. - runtime_files.append('.vpython') for f in runtime_files: self._test_cmd.extend(['--files', f]) @@ -694,13 +643,17 @@ def post_run(self, _): if self._on_device_script: os.remove(self._on_device_script) + if self._test_launcher_summary_output and self._rdb_client: + logging.error('Native ResultDB integration is not supported for GTests. ' + 'Upload results via result_adapter instead. ' + 'See crbug.com/1330441.') + def device_test(args, unknown_args): # cros_run_test has trouble with relative paths that go up directories, # so cd to src/, which should be the root of all data deps. os.chdir(CHROMIUM_SRC_PATH) - # pylint: disable=redefined-variable-type # TODO: Remove the above when depot_tool's pylint is updated to include the # fix to https://github.com/PyCQA/pylint/issues/710. 
if args.test_type == 'tast': @@ -718,7 +671,7 @@ def device_test(args, unknown_args): def host_cmd(args, cmd_args): if not cmd_args: raise TestFormatError('Must specify command to run on the host.') - elif args.deploy_chrome and not args.path_to_outdir: + if args.deploy_chrome and not args.path_to_outdir: raise TestFormatError( '--path-to-outdir must be specified if --deploy-chrome is passed.') @@ -756,11 +709,22 @@ def host_cmd(args, cmd_args): test_env = setup_env() if args.deploy_chrome or args.deploy_lacros: - # Mounting ash-chrome gives it enough disk space to not need stripping. - cros_run_test_cmd.extend([ - '--deploy-lacros', '--lacros-launcher-script', - LACROS_LAUNCHER_SCRIPT_PATH - ] if args.deploy_lacros else ['--deploy', '--mount', '--nostrip']) + if args.deploy_lacros: + cros_run_test_cmd.extend([ + '--deploy-lacros', '--lacros-launcher-script', + LACROS_LAUNCHER_SCRIPT_PATH + ]) + if args.deploy_chrome: + # Mounting ash-chrome gives it enough disk space to not need stripping + # most of the time. + cros_run_test_cmd.extend(['--deploy', '--mount']) + else: + # Mounting ash-chrome gives it enough disk space to not need stripping + # most of the time. + cros_run_test_cmd.extend(['--deploy', '--mount']) + + if not args.strip_chrome: + cros_run_test_cmd.append('--nostrip') cros_run_test_cmd += [ '--build-dir', @@ -802,6 +766,13 @@ def add_common_args(*parsers): parser.add_argument('--verbose', '-v', action='store_true') parser.add_argument( '--board', type=str, required=True, help='Type of CrOS device.') + parser.add_argument( + '--deploy-chrome', + action='store_true', + help='Will deploy a locally built ash-chrome binary to the device ' + 'before running the host-cmd.') + parser.add_argument( + '--deploy-lacros', action='store_true', help='Deploy a lacros-chrome.') parser.add_argument( '--cros-cache', type=str, @@ -851,6 +822,10 @@ def add_common_args(*parsers): '--public-image', action='store_true', help='Will flash a public "full" image to the device.') + parser.add_argument( + '--magic-vm-cache', + help='Path to the magic CrOS VM cache dir. 
See the comment above '
+      '"magic_cros_vm_cache" in mixins.pyl for more info.')
 
   vm_or_device_group = parser.add_mutually_exclusive_group()
   vm_or_device_group.add_argument(
@@ -875,14 +850,10 @@ def main():
       'will be 127.0.0.1:9222.')
   host_cmd_parser.set_defaults(func=host_cmd)
   host_cmd_parser.add_argument(
-      '--deploy-chrome',
-      action='store_true',
-      help='Will deploy a locally built ash-chrome binary to the device before '
-      'running the host-cmd.')
-  host_cmd_parser.add_argument(
-      '--deploy-lacros',
+      '--strip-chrome',
       action='store_true',
-      help='Deploy a lacros-chrome instead of ash-chrome.')
+      help='Strips symbols from ash-chrome or lacros-chrome before deploying '
+      'to the device.')
 
   gtest_parser = subparsers.add_parser(
       'gtest', help='Runs a device-side gtest.')
@@ -946,22 +917,23 @@ def main():
       '--strip-chrome',
       action='store_true',
       help='Strips symbols from ash-chrome before deploying to the device.')
-  tast_test_parser.add_argument(
-      '--deploy-lacros',
-      action='store_true',
-      help='Deploy a lacros-chrome instead of ash-chrome.')
   tast_test_parser.add_argument(
       '--tast-var',
       action='append',
       dest='tast_vars',
       help='Runtime variables for Tast tests, and the format are expected to '
      'be "key=value" pairs.')
+  tast_test_parser.add_argument(
+      '--tast-retries',
+      type=int,
+      dest='tast_retries',
+      help='Number of retries for failed Tast tests on the same DUT.')
   tast_test_parser.add_argument(
       '--test',
       '-t',
       action='append',
       dest='tests',
-      help='A Tast test to run in the device (eg: "ui.ChromeLogin").')
+      help='A Tast test to run on the device (e.g. "login.Chrome").')
   tast_test_parser.add_argument(
       '--gtest_filter',
       type=str,
@@ -970,20 +942,14 @@ def main():
       'cmd-line API, this will overwrite the value(s) of "--test" above.')
 
   add_common_args(gtest_parser, tast_test_parser, host_cmd_parser)
-
-  args = sys.argv[1:]
-  unknown_args = []
-  # If a '--' is present in the args, treat everything to the right of it as
-  # args to the test and everything to the left as args to this test runner.
-  # Otherwise treat all known args as args to this test runner and all unknown
-  # args as test args.
-  if '--' in args:
-    unknown_args = args[args.index('--') + 1:]
-    args = args[0:args.index('--')]
-  if unknown_args:
-    args = parser.parse_args(args=args)
-  else:
-    args, unknown_args = parser.parse_known_args()
+  args, unknown_args = parser.parse_known_args()
+  # Re-add N-1 -v/--verbose flags to the args we'll pass to whatever we are
+  # running. The assumption is that only one verbosity increase would be meant
+  # for this script since it's a boolean value instead of increasing verbosity
+  # with more instances.
+  verbose_flags = [a for a in sys.argv if a in ('-v', '--verbose')]
+  if verbose_flags:
+    unknown_args += verbose_flags[1:]
 
   logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)
 
@@ -1011,6 +977,12 @@ def main():
   # unset in that case.
   os.environ.pop('BOTO_CONFIG', None)
 
+  if args.magic_vm_cache:
+    full_vm_cache_path = os.path.join(CHROMIUM_SRC_PATH, args.magic_vm_cache)
+    if os.path.exists(full_vm_cache_path):
+      with open(os.path.join(full_vm_cache_path, 'swarming.txt'), 'w') as f:
+        f.write('non-empty file to make swarming persist this cache')
+
   return args.func(args, unknown_args)
 
 
diff --git a/build/chromeos/test_runner_test.py b/build/chromeos/test_runner_test.py
index 15d1b1ffdab3..c61c7a443341 100755
--- a/build/chromeos/test_runner_test.py
+++ b/build/chromeos/test_runner_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env vpython3
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -9,17 +9,17 @@
 import sys
 import tempfile
 import unittest
+import six
 
 # The following non-std imports are fetched via vpython. See the list at
 # //.vpython
 import mock  # pylint: disable=import-error
 from parameterized import parameterized  # pylint: disable=import-error
-import six
 
 import test_runner
 
 _TAST_TEST_RESULTS_JSON = {
-    "name": "ui.ChromeLogin",
+    "name": "login.Chrome",
     "errors": None,
     "start": "2020-01-01T15:41:30.799228462-08:00",
     "end": "2020-01-01T15:41:53.318914698-08:00",
@@ -47,7 +47,7 @@ def safeAssertItemsEqual(self, list1, list2):
     if six.PY3:
       self.assertSetEqual(set(list1), set(list2))
     else:
-      self.assertItemsEqual(list1, list2)
+      self.assertCountEqual(list1, list2)
 
 
 class TastTests(TestRunnerTest):
@@ -101,7 +101,7 @@ def test_tast_gtest_filter(self):
     args = self.get_common_tast_args(False) + [
         '--attr-expr=( "group:mainline" && "dep:chrome" && !informational)',
-        '--gtest_filter=ui.ChromeLogin:ui.WindowControl',
+        '--gtest_filter=login.Chrome:ui.WindowControl',
     ]
     with mock.patch.object(sys, 'argv', args),\
         mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
@@ -111,7 +111,7 @@ def test_tast_gtest_filter(self):
     # The gtest filter should cause the Tast expr to be replaced with a list
     # of the tests in the filter.
     expected_cmd = self.get_common_tast_expectations(False) + [
-        '--tast=("name:ui.ChromeLogin" || "name:ui.WindowControl")'
+        '--tast=("name:login.Chrome" || "name:ui.WindowControl")'
     ]
 
     self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
@@ -179,7 +179,7 @@ def test_tast_with_vars(self, use_vm):
       json.dump(_TAST_TEST_RESULTS_JSON, f)
 
     args = self.get_common_tast_args(use_vm) + [
-        '-t=ui.ChromeLogin',
+        '-t=login.Chrome',
         '--tast-var=key=value',
     ]
     with mock.patch.object(sys, 'argv', args),\
@@ -187,7 +187,30 @@ def test_tast_with_vars(self, use_vm):
       mock_popen.return_value.returncode = 0
       test_runner.main()
       expected_cmd = self.get_common_tast_expectations(use_vm) + [
-          '--tast', 'ui.ChromeLogin', '--tast-var', 'key=value'
+          '--tast', 'login.Chrome', '--tast-var', 'key=value'
       ]
 
       self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
 
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_tast_retries(self, use_vm):
+    """Tests running Tast tests with retries."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(use_vm) + [
+        '-t=login.Chrome',
+        '--tast-retries=1',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+        mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+      test_runner.main()
+      expected_cmd = self.get_common_tast_expectations(use_vm) + [
+          '--tast', 'login.Chrome', '--tast-retries=1'
+      ]
 
       self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
 
@@ -202,7 +225,7 @@ def test_tast(self, use_vm):
       json.dump(_TAST_TEST_RESULTS_JSON, f)
 
     args = self.get_common_tast_args(use_vm) + [
-        '-t=ui.ChromeLogin',
+        '-t=login.Chrome',
     ]
     with mock.patch.object(sys, 'argv', args),\
         mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
       mock_popen.return_value.returncode = 0
       test_runner.main()
 
     expected_cmd = self.get_common_tast_expectations(use_vm) + [
-        '--tast', 'ui.ChromeLogin'
+        '--tast', 'login.Chrome'
     ]
 
     self.safeAssertItemsEqual(expected_cmd,
mock_popen.call_args[0][0]) @@ -281,10 +304,10 @@ def test_gtest_with_vpython(self): gtest.build_test_command() # Create the two expected tools, and the test should be ready to run. - with open(os.path.join(args.vpython_dir, 'vpython'), 'w'): + with open(os.path.join(args.vpython_dir, 'vpython3'), 'w'): pass # Just touch the file. os.mkdir(os.path.join(args.vpython_dir, 'bin')) - with open(os.path.join(args.vpython_dir, 'bin', 'python'), 'w'): + with open(os.path.join(args.vpython_dir, 'bin', 'python3'), 'w'): pass gtest = test_runner.GTestTest(args, None) gtest.build_test_command() @@ -293,10 +316,12 @@ def test_gtest_with_vpython(self): class HostCmdTests(TestRunnerTest): @parameterized.expand([ - [True], - [False], + [True, False, True], + [False, True, True], + [True, True, False], + [False, True, False], ]) - def test_host_cmd(self, is_lacros): + def test_host_cmd(self, is_lacros, is_ash, strip_chrome): args = [ 'script_name', 'host-cmd', @@ -307,8 +332,10 @@ def test_host_cmd(self, is_lacros): ] if is_lacros: args += ['--deploy-lacros'] - else: + if is_ash: args += ['--deploy-chrome'] + if strip_chrome: + args += ['--strip-chrome'] args += [ '--', 'fake_cmd', @@ -337,8 +364,10 @@ def test_host_cmd(self, is_lacros): '--lacros-launcher-script', test_runner.LACROS_LAUNCHER_SCRIPT_PATH, ] - else: - expected_cmd += ['--mount', '--nostrip', '--deploy'] + if is_ash: + expected_cmd += ['--mount', '--deploy'] + if not strip_chrome: + expected_cmd += ['--nostrip'] expected_cmd += [ '--', diff --git a/build/cipd/cipd.gni b/build/cipd/cipd.gni index e7795c1062b3..852adeff8c4a 100644 --- a/build/cipd/cipd.gni +++ b/build/cipd/cipd.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -123,7 +123,7 @@ template("cipd_package_definition_by_file") { } action(target_name) { script = "//build/cipd/cipd_from_file.py" - inputs = [ "//build/cipd/cipd_from_file.py" ] + inputs = [ invoker.files_file ] args = [ "--description=" + invoker.description, "--buildtype=" + invoker.buildtype, diff --git a/build/cipd/cipd_from_file.py b/build/cipd/cipd_from_file.py index 0f08f692e346..979b2b538895 100755 --- a/build/cipd/cipd_from_file.py +++ b/build/cipd/cipd_from_file.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Script to generate yaml file based on FILES.cfg.""" diff --git a/build/cipd/clobber_cipd_root.py b/build/cipd/clobber_cipd_root.py deleted file mode 100755 index 5d36c72239f8..000000000000 --- a/build/cipd/clobber_cipd_root.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
-
-"""Clobbers a CIPD root."""
-
-import argparse
-import os
-import shutil
-import sys
-
-
-def main():
-  parser = argparse.ArgumentParser(
-      description='Clobbers the CIPD root in the given directory.')
-
-  parser.add_argument(
-      '--root',
-      required=True,
-      help='Root directory for dependency.')
-  args = parser.parse_args()
-
-  cipd_root_dir = os.path.join(args.root, '.cipd')
-  if os.path.exists(cipd_root_dir):
-    shutil.rmtree(cipd_root_dir)
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/clobber.py b/build/clobber.py
index 1de321234bfd..e886737dcc0c 100755
--- a/build/clobber.py
+++ b/build/clobber.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2015 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -22,36 +22,43 @@ def extract_gn_build_commands(build_ninja_file):
   On error, returns the empty string."""
   result = ""
   with open(build_ninja_file, 'r') as f:
-    # Read until the third blank line. The first thing GN writes to the file
-    # is "ninja_required_version = x.y.z", then the "rule gn" and the third
-    # is the section for "build build.ninja", separated by blank lines.
-    num_blank_lines = 0
-    while num_blank_lines < 3:
-      line = f.readline()
-      if len(line) == 0:
-        return ''  # Unexpected EOF.
+    # Reads until the first empty line after the "build build.ninja:" target.
+    # We assume everything before it is necessary as well (e.g. the
+    # "ninja_required_version" line).
+    found_build_dot_ninja_target = False
+    for line in f.readlines():
       result += line
-      if line[0] == '\n':
-        num_blank_lines = num_blank_lines + 1
-  return result
+      if line.startswith('build build.ninja:'):
+        found_build_dot_ninja_target = True
+      if found_build_dot_ninja_target and line[0] == '\n':
+        return result
+  return ''  # We got to EOF and didn't find what we were looking for.
 
 
-def delete_dir(build_dir):
-  if os.path.islink(build_dir):
-    return
+def _rmtree(d):
   # For unknown reasons (anti-virus?) rmtree of Chromium build directories
   # often fails on Windows.
   if sys.platform.startswith('win'):
-    subprocess.check_call(['rmdir', '/s', '/q', build_dir], shell=True)
+    subprocess.check_call(['rmdir', '/s', '/q', d], shell=True)
   else:
-    shutil.rmtree(build_dir)
+    shutil.rmtree(d)
+
+
+def _clean_dir(build_dir):
+  # Remove files/sub directories individually instead of recreating the build
+  # dir because it fails when the build dir is symlinked or mounted.
+  for e in os.scandir(build_dir):
+    if e.is_dir():
+      _rmtree(e.path)
+    else:
+      os.remove(e.path)
 
 
 def delete_build_dir(build_dir):
   # GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
   build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
   if not os.path.exists(build_ninja_d_file):
-    delete_dir(build_dir)
+    _clean_dir(build_dir)
     return
 
   # GN builds aren't automatically regenerated when you sync. To avoid
@@ -68,15 +75,16 @@ def delete_build_dir(build_dir):
   except IOError:
     args_contents = ''
 
-  e = None
+  exception_during_rm = None
   try:
-    # delete_dir and os.mkdir() may fail, such as when chrome.exe is running,
+    # _clean_dir() may fail, such as when chrome.exe is running,
     # and we still want to restore args.gn/build.ninja/build.ninja.d, so catch
     # the exception and rethrow it later.
- delete_dir(build_dir) - os.mkdir(build_dir) + # We manually rm files inside the build dir rather than using "gn clean/gen" + # since we may not have run all necessary DEPS hooks yet at this point. + _clean_dir(build_dir) except Exception as e: - pass + exception_during_rm = e # Put back the args file (if any). if args_contents != '': @@ -105,9 +113,10 @@ def delete_build_dir(build_dir): with open(build_ninja_d_file, 'w') as f: f.write('build.ninja: nonexistant_file.gn\n') - if e: + if exception_during_rm: # Rethrow the exception we caught earlier. - raise e + raise exception_during_rm + def clobber(out_dir): """Clobber contents of build directory. diff --git a/build/clobber_unittest.py b/build/clobber_unittest.py new file mode 100755 index 000000000000..d38c447b1443 --- /dev/null +++ b/build/clobber_unittest.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import pathlib +import shutil +import sys +import tempfile +import textwrap +import unittest +from unittest import mock + +import clobber + + +class TestExtractBuildCommand(unittest.TestCase): + def setUp(self): + self.build_ninja_file, self.build_ninja_path = tempfile.mkstemp(text=True) + + def tearDown(self): + os.close(self.build_ninja_file) + os.remove(self.build_ninja_path) + + def test_normal_extraction(self): + build_ninja_file_contents = textwrap.dedent(""" + ninja_required_version = 1.7.2 + + rule gn + command = ../../buildtools/gn --root=../.. -q --regeneration gen . + pool = console + description = Regenerating ninja files + + build build.ninja.stamp: gn + generator = 1 + depfile = build.ninja.d + + build build.ninja: phony build.ninja.stamp + generator = 1 + + pool build_toolchain_action_pool + depth = 72 + + pool build_toolchain_link_pool + depth = 23 + + subninja toolchain.ninja + subninja clang_newlib_x64/toolchain.ninja + subninja glibc_x64/toolchain.ninja + subninja irt_x64/toolchain.ninja + subninja nacl_bootstrap_x64/toolchain.ninja + subninja newlib_pnacl/toolchain.ninja + + build blink_python_tests: phony obj/blink_python_tests.stamp + build blink_tests: phony obj/blink_tests.stamp + + default all + """) # Based off of a standard linux build dir. + with open(self.build_ninja_path, 'w') as f: + f.write(build_ninja_file_contents) + + expected_build_ninja_file_contents = textwrap.dedent(""" + ninja_required_version = 1.7.2 + + rule gn + command = ../../buildtools/gn --root=../.. -q --regeneration gen . + pool = console + description = Regenerating ninja files + + build build.ninja.stamp: gn + generator = 1 + depfile = build.ninja.d + + build build.ninja: phony build.ninja.stamp + generator = 1 + + """) + + self.assertEqual(clobber.extract_gn_build_commands(self.build_ninja_path), + expected_build_ninja_file_contents) + + def test_unexpected_format(self): + # No "build build.ninja:" line should make it return an empty string. + build_ninja_file_contents = textwrap.dedent(""" + ninja_required_version = 1.7.2 + + rule gn + command = ../../buildtools/gn --root=../.. -q --regeneration gen . 
+ pool = console + description = Regenerating ninja files + + subninja toolchain.ninja + + build blink_python_tests: phony obj/blink_python_tests.stamp + build blink_tests: phony obj/blink_tests.stamp + + """) + with open(self.build_ninja_path, 'w') as f: + f.write(build_ninja_file_contents) + + self.assertEqual(clobber.extract_gn_build_commands(self.build_ninja_path), + '') + + +class TestDelete(unittest.TestCase): + def setUp(self): + self.build_dir = tempfile.mkdtemp() + + pathlib.Path(os.path.join(self.build_dir, 'build.ninja')).touch() + pathlib.Path(os.path.join(self.build_dir, 'build.ninja.d')).touch() + + def tearDown(self): + shutil.rmtree(self.build_dir) + + def test_delete_build_dir_full(self): + # Create a dummy file in the build dir and ensure it gets removed. + dummy_file = os.path.join(self.build_dir, 'dummy') + pathlib.Path(dummy_file).touch() + + clobber.delete_build_dir(self.build_dir) + + self.assertFalse(os.path.exists(dummy_file)) + + def test_delete_build_dir_fail(self): + # Make delete_dir() throw to ensure it's handled gracefully. + + with mock.patch('clobber._clean_dir', side_effect=OSError): + with self.assertRaises(OSError): + clobber.delete_build_dir(self.build_dir) + + @unittest.skipIf(sys.platform == 'win32', 'Symlinks are not allowed on Windows by default') + def test_delete_build_dir_link(self): + with tempfile.TemporaryDirectory() as tmpdir: + # create a symlink. + build_dir = os.path.join(tmpdir, 'link') + os.symlink(self.build_dir, build_dir) + + # create a dummy file. + dummy_file = os.path.join(build_dir, 'dummy') + pathlib.Path(dummy_file).touch() + clobber.delete_build_dir(build_dir) + + self.assertFalse(os.path.exists(dummy_file)) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/compiled_action.gni b/build/compiled_action.gni index 7e25a0b6fc20..6a632bdfa6bf 100644 --- a/build/compiled_action.gni +++ b/build/compiled_action.gni @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/compute_build_timestamp.py b/build/compute_build_timestamp.py index ceb507b26c96..befe8445608d 100755 --- a/build/compute_build_timestamp.py +++ b/build/compute_build_timestamp.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2018 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Returns a timestamp that approximates the build date. @@ -27,7 +27,6 @@ # the symbol server, so rarely changing timestamps can cause conflicts there # as well. We only upload symbols for official builds to the symbol server. -from __future__ import print_function import argparse import calendar @@ -116,9 +115,21 @@ def main(): # builds are typically added to symbol servers and Windows symbol servers # use the link timestamp as the prime differentiator, but for unofficial # builds we do lots of quantization to avoid churn. 
- if args.build_type != 'official': + offset = 0 + if args.build_type == 'official': + if os.name == 'nt': + version_path = os.path.join(THIS_DIR, os.pardir, 'chrome', 'VERSION') + with open(version_path) as f: + patch_line = f.readlines()[3].strip() + # Use the patch number as an offset to the build date so that multiple + # versions with different patch numbers built from the same source code + # will get different build_date values. This is critical for Windows + # symbol servers, to avoid collisions. + assert patch_line.startswith('PATCH=') + offset = int(patch_line[6:]) + else: build_date = GetUnofficialBuildDate(build_date) - print(int(calendar.timegm(build_date.utctimetuple()))) + print(offset + int(calendar.timegm(build_date.utctimetuple()))) return 0 diff --git a/build/config/BUILD.gn b/build/config/BUILD.gn index a78ddc566b96..2106261c1f2c 100644 --- a/build/config/BUILD.gn +++ b/build/config/BUILD.gn @@ -1,11 +1,10 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/c++/c++.gni") import("//build/config/chrome_build.gni") import("//build/config/chromecast_build.gni") -import("//build/config/crypto.gni") import("//build/config/dcheck_always_on.gni") import("//build/config/features.gni") @@ -49,9 +48,6 @@ config("feature_flags") { defines = [] if (dcheck_always_on) { defines += [ "DCHECK_ALWAYS_ON=1" ] - if (dcheck_is_configurable) { - defines += [ "DCHECK_IS_CONFIGURABLE" ] - } } if (use_udev) { # TODO(brettw) should probably be "=1". @@ -63,10 +59,10 @@ config("feature_flags") { if (use_glib) { defines += [ "USE_GLIB=1" ] } - if (use_nss_certs) { - defines += [ "USE_NSS_CERTS=1" ] - } if (use_ozone && !is_android) { + # Chrome code should check BUILDFLAG(IS_OZONE) instead of + # defined(USE_OZONE). + # # Note that some Chrome OS builds unconditionally set |use_ozone| to true, # but they also build some targets with the Android toolchain. This ensures # that Android targets still build with USE_OZONE=0 in such cases. @@ -75,9 +71,6 @@ config("feature_flags") { # setting use_ozone globally. defines += [ "USE_OZONE=1" ] } - if (use_x11) { - defines += [ "USE_X11=1" ] - } if (is_asan || is_hwasan || is_lsan || is_tsan || is_msan) { defines += [ "MEMORY_TOOL_REPLACES_ALLOCATOR" ] } @@ -96,7 +89,7 @@ config("feature_flags") { if (is_msan) { defines += [ "MEMORY_SANITIZER" ] } - if (is_ubsan || is_ubsan_null || is_ubsan_vptr || is_ubsan_security) { + if (is_ubsan || is_ubsan_vptr || is_ubsan_security) { defines += [ "UNDEFINED_SANITIZER" ] } if (is_official_build) { @@ -238,6 +231,9 @@ group("common_deps") { visibility = [ ":executable_deps", ":loadable_module_deps", + ":rust_bin_deps", + ":rust_cdylib_deps", + ":rust_dylib_deps", ":shared_library_deps", ] @@ -264,6 +260,9 @@ group("common_deps") { if (is_fuchsia) { public_deps += [ "//third_party/fuchsia-sdk/sdk/build/config:runtime_library_group" ] + if (is_asan) { + public_deps += [ "//build/config/fuchsia:asan_runtime_library" ] + } } } @@ -273,16 +272,44 @@ group("executable_deps") { if (export_libcxxabi_from_executables) { public_deps += [ "//buildtools/third_party/libc++abi" ] } + public_configs = [ "//build/config/sanitizers:link_executable" ] +} + +# Only the rust_bin template in BUILDCONFIG.gn should reference this. 
+group("rust_bin_deps") { + public_deps = [ ":common_deps" ] + if (export_libcxxabi_from_executables) { + public_deps += [ "//buildtools/third_party/libc++abi" ] + } + public_configs = [ "//build/config/sanitizers:link_executable" ] } # Only the loadable_module template in BUILDCONFIG.gn should reference this. group("loadable_module_deps") { public_deps = [ ":common_deps" ] + + public_configs = [ "//build/config/sanitizers:link_shared_library" ] } # Only the shared_library template in BUILDCONFIG.gn should reference this. group("shared_library_deps") { public_deps = [ ":common_deps" ] + + public_configs = [ "//build/config/sanitizers:link_shared_library" ] +} + +# Only the rust_dylib template in BUILDCONFIG.gn should reference this. +group("rust_dylib_deps") { + public_deps = [ ":common_deps" ] + + public_configs = [ "//build/config/sanitizers:link_shared_library" ] +} + +# Only the rust_cdylib template in BUILDCONFIG.gn should reference this. +group("rust_cdylib_deps") { + public_deps = [ ":common_deps" ] + + public_configs = [ "//build/config/sanitizers:link_shared_library" ] } # Executable configs ----------------------------------------------------------- @@ -301,6 +328,7 @@ config("executable_config") { if (is_win) { configs += _windows_linker_configs + configs += [ "//build/config/win:exe_flags" ] } else if (is_mac) { configs += [ "//build/config/mac:mac_dynamic_flags" ] } else if (is_ios) { @@ -310,10 +338,8 @@ config("executable_config") { ] } else if (is_linux || is_chromeos || is_android || current_os == "aix") { configs += [ "//build/config/gcc:executable_config" ] - if (is_chromecast) { + if (is_castos || is_cast_android) { configs += [ "//build/config/chromecast:executable_config" ] - } else if (is_fuchsia) { - configs += [ "//build/config/fuchsia:executable_config" ] } } @@ -325,7 +351,6 @@ config("executable_config") { if (use_locally_built_instrumented_libraries) { configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ] } - configs += [ "//build/config/sanitizers:link_executable" ] } # Shared library configs ------------------------------------------------------- @@ -343,7 +368,7 @@ config("shared_library_config") { "//build/config/ios:ios_dynamic_flags", "//build/config/ios:ios_shared_library_flags", ] - } else if (is_chromecast) { + } else if (is_castos || is_cast_android) { configs += [ "//build/config/chromecast:shared_library_config" ] } else if (is_linux || is_chromeos || current_os == "aix") { configs += [ "//build/config/gcc:shared_library_config" ] @@ -357,7 +382,6 @@ config("shared_library_config") { if (use_locally_built_instrumented_libraries) { configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ] } - configs += [ "//build/config/sanitizers:link_shared_library" ] } # Add this config to your target to enable precompiled headers. @@ -388,3 +412,10 @@ config("precompiled_headers") { } } } + +# Add this config to link steps in order to compress debug sections. This is +# especially useful on 32-bit architectures in order to keep file sizes under +# 4gb. +config("compress_debug_sections") { + ldflags = [ "-gz" ] +} diff --git a/build/config/BUILDCONFIG.gn b/build/config/BUILDCONFIG.gn index 46bc4769236e..33651426b6d5 100644 --- a/build/config/BUILDCONFIG.gn +++ b/build/config/BUILDCONFIG.gn @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -6,7 +6,7 @@ # WHAT IS THIS FILE? # ============================================================================= # -# This is the master GN build configuration. This file is loaded after the +# This is the main GN build configuration. This file is loaded after the # build args (args.gn) for the build directory and after the toplevel ".gn" # file (which points to this file as the build configuration). # @@ -128,6 +128,11 @@ declare_args() { # release (!is_debug). This might be better expressed as a tri-state # (debug, release, official) but for historical reasons there are two # separate flags. + # + # IMPORTANT NOTE: (!is_debug) is *not* sufficient to get satisfying + # performance. In particular, DCHECK()s are still enabled for release builds, + # which can halve overall performance, and do increase memory usage. Always + # set "is_official_build" to true for any build intended to ship to end-users. is_official_build = false # Set to true when compiling with the Clang compiler. @@ -145,6 +150,11 @@ declare_args() { # every toolchain can pass through the "global" value via toolchain_args(). host_toolchain = "" + # Do not set this directly. + # It should be set only by //build/toolchains/android:robolectric_x64. + # True when compiling native code for use with robolectric_binary(). + is_robolectric = false + # DON'T ADD MORE FLAGS HERE. Read the comment above. } @@ -164,6 +174,8 @@ declare_args() { } assert(!(is_debug && is_official_build), "Can't do official debug builds") +assert(!(current_os == "ios" && is_component_build), + "Can't use component build on iOS") # ============================================================================== # TOOLCHAIN SETUP @@ -211,6 +223,8 @@ if (host_toolchain == "") { } } else if (host_os == "aix") { host_toolchain = "//build/toolchain/aix:$host_cpu" + } else if (host_os == "zos") { + host_toolchain = "//build/toolchain/zos:$host_cpu" } else { assert(false, "Unsupported host_os: $host_os") } @@ -219,8 +233,7 @@ if (host_toolchain == "") { _default_toolchain = "" if (target_os == "android") { - assert(host_os == "linux" || host_os == "mac", - "Android builds are only supported on Linux and Mac hosts.") + assert(host_os == "linux", "Android builds are only supported on Linux.") _default_toolchain = "//build/toolchain/android:android_clang_$target_cpu" } else if (target_os == "chromeos" || target_os == "linux") { # See comments in build/toolchain/cros/BUILD.gn about board compiles. 
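To make the new is_official_build note above concrete: a build intended for end-users needs both flags set, since a release build alone still pays for DCHECKs. A minimal args.gn sketch:

    # args.gn for a shipping configuration. Per the comment above, is_debug =
    # false by itself still leaves DCHECK()s enabled.
    is_debug = false
    is_official_build = true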
@@ -253,6 +266,8 @@ if (target_os == "android") { _default_toolchain = "//build/toolchain/win:uwp_$target_cpu" } else if (target_os == "aix") { _default_toolchain = "//build/toolchain/aix:$target_cpu" +} else if (target_os == "zos") { + _default_toolchain = "//build/toolchain/zos:$target_cpu" } else { assert(false, "Unsupported target_os: $target_os") } @@ -318,21 +333,22 @@ default_compiler_configs = [ "//build/config/compiler:afdo", "//build/config/compiler:afdo_optimize_size", "//build/config/compiler:cet_shadow_stack", + "//build/config/compiler:chromium_code", "//build/config/compiler:compiler", "//build/config/compiler:compiler_arm_fpu", "//build/config/compiler:compiler_arm_thumb", - "//build/config/compiler:chromium_code", "//build/config/compiler:default_include_dirs", + "//build/config/compiler:default_init_stack_vars", "//build/config/compiler:default_optimization", "//build/config/compiler:default_stack_frames", "//build/config/compiler:default_symbols", "//build/config/compiler:export_dynamic", "//build/config/compiler:no_exceptions", "//build/config/compiler:no_rtti", + "//build/config/compiler:no_unresolved_symbols", "//build/config/compiler:runtime_library", "//build/config/compiler:thin_archive", "//build/config/compiler:thinlto_optimize_default", - "//build/config/compiler:default_init_stack_vars", "//build/config/compiler/pgo:default_pgo_flags", "//build/config/coverage:default_coverage", "//build/config/sanitizers:default_sanitizer_flags", @@ -382,10 +398,18 @@ if (is_debug) { # Static libraries and source sets use only the compiler ones. set_defaults("static_library") { configs = default_compiler_configs + + # For Rust, a static library involves linking in all dependencies, and it + # performs LTO. But since we will perform LTO in the C++ linker which + # consumes the library, we defer LTO from Rust into the linker. + configs += [ "//build/config/compiler:rust_defer_lto_to_linker" ] } set_defaults("source_set") { configs = default_compiler_configs } +set_defaults("rust_library") { + configs = default_compiler_configs +} # Compute the set of configs common to all linked targets (shared libraries, # loadable modules, executables) to avoid duplication below. @@ -399,8 +423,8 @@ if (is_win) { # that shouldn't use the windows subsystem. "//build/config/win:console", ] -} else if (is_mac) { - _linker_configs = [ "//build/config/mac:strip_all" ] +} else if (is_apple) { + _linker_configs = [ "//build/config/apple:strip_all" ] } else { _linker_configs = [] } @@ -450,6 +474,22 @@ set_defaults("loadable_module") { } } +default_rust_proc_macro_configs = + default_shared_library_configs + [ "//build/rust:proc_macro_extern" ] + + # Rust proc macros don't support (Thin)LTO, so always remove it. + [ + "//build/config/compiler:thinlto_optimize_default", + "//build/config/compiler:thinlto_optimize_max", + ] - + [ + "//build/config/compiler:thinlto_optimize_default", + "//build/config/compiler:thinlto_optimize_max", + ] + +set_defaults("rust_proc_macro") { + configs = default_rust_proc_macro_configs +} + # A helper for forwarding testonly and visibility. # Forwarding "*" does not include variables from outer scopes (to avoid copying # all globals into each template invocation), so it will not pick up @@ -474,6 +514,9 @@ foreach(_target_type, "executable", "loadable_module", "shared_library", + "rust_bin", + "rust_dylib", + "rust_cdylib", ]) { template(_target_type) { # Alias "target_name" because it is clobbered by forward_variables_from(). 
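A note on the list arithmetic used above for default_rust_proc_macro_configs: GN raises an error when a subtracted element is not present in a list, so the ThinLTO configs are appended first to guarantee the subtraction succeeds, while also stripping any copies already inherited from default_shared_library_configs. A standalone illustration of the idiom (config names are placeholders):

    _example_configs = [ "//build/config:a" ]
    _example_configs += [ "//build/config:b" ]  # Ensure "b" is present...
    _example_configs -= [ "//build/config:b" ]  # ...so this removal cannot fail.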
@@ -499,7 +542,7 @@ foreach(_target_type,
       # On Android, write shared library output file to metadata. We will use
       # this information to, for instance, collect all shared libraries that
       # should be packaged into an APK.
-      if (!defined(invoker.metadata) && is_android &&
+      if (!defined(invoker.metadata) && (is_android || is_robolectric) &&
           (_target_type == "shared_library" ||
            _target_type == "loadable_module")) {
         _output_name = _target_name
@@ -550,6 +593,16 @@ foreach(_target_type,
 template("component") {
   if (is_component_build) {
     _component_mode = "shared_library"
+
+    # Generate a unique output_name for a shared library if not set by invoker.
+    if (!defined(invoker.output_name)) {
+      _output_name = get_label_info(":$target_name", "label_no_toolchain")
+      _output_name =
+          string_replace(_output_name, "$target_name:$target_name", target_name)
+      _output_name = string_replace(_output_name, "//", "")
+      _output_name = string_replace(_output_name, "/", "_")
+      _output_name = string_replace(_output_name, ":", "_")
+    }
   } else if (defined(invoker.static_component_type)) {
     assert(invoker.static_component_type == "static_library" ||
            invoker.static_component_type == "source_set")
@@ -562,19 +615,139 @@ template("component") {
     _component_mode = "static_library"
   }
   target(_component_mode, target_name) {
+    if (defined(_output_name)) {
+      output_name = _output_name
+    }
     forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
     forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
   }
 }
 
 # Component defaults
+# Set a variable since we also want to make this available
+# to mixed_component.gni
+if (is_component_build) {
+  default_component_configs = default_shared_library_configs
+  if (is_android) {
+    default_component_configs -=
+        [ "//build/config/android:hide_all_but_jni_onload" ]
+  }
+} else {
+  default_component_configs = default_compiler_configs
+}
+
 set_defaults("component") {
-  if (is_component_build) {
-    configs = default_shared_library_configs
-    if (is_android) {
-      configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
+  configs = default_component_configs
+}
+
+# =============================================================================
+# ACTION OVERRIDE
+# =============================================================================
+#
+# We override gn action() to support remote execution using rewrapper. The
+# invoker should set allow_remote to true if remote execution is desired.
+#
+# As remote execution requires inputs to be made more explicit than is normally
+# expected with gn, you may find that setting allow_remote to true will result
+# in many missing file errors. In most cases, this should be resolved by
+# explicitly declaring these inputs/sources.
+#
+# However, it may be impractical to determine these inputs in gn. For such
+# cases, the invoker can specify a custom input processor; the available
+# processors are currently defined and implemented in
+# //build/util/action_remote.py. The appropriate value should be set using
+# the custom_processor arg.
+
+# Variables needed by rbe.gni aren't available at the top of this file.
+import("//build/toolchain/rbe.gni")
+
+# TODO(b/253987456): Add action_foreach support.
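As a usage sketch of the override implemented just below: a build file opts an action into remote execution by declaring its inputs explicitly and setting allow_remote. The script, input, and output names here are invented for illustration:

    action("generate_lookup_table") {
      script = "//tools/gen_table.py"  # Hypothetical helper script.

      # Remote execution stages only declared files, so list inputs explicitly.
      inputs = [ "table_config.json" ]
      outputs = [ "$target_gen_dir/table.h" ]
      args = [
        "--config",
        rebase_path("table_config.json", root_build_dir),
        "--output",
        rebase_path("$target_gen_dir/table.h", root_build_dir),
      ]
      allow_remote = true
    }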
+foreach(_target_type, [ "action" ]) { + template(_target_type) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, [ "allow_remote" ]) + action("${target_name}") { + forward_variables_from(invoker, + [ + "args", + "assert_no_deps", + "check_includes", + "configs", + "data_deps", + "data", + "depfile", + "deps", + "metadata", + "outputs", + "pool", + "script", + "public_configs", + "public_deps", + "response_file_contents", + "sources", + "write_runtime_deps", + ]) + allow_remote = false + if (defined(invoker.allow_remote)) { + allow_remote = invoker.allow_remote + } + + # If remote execution is desired, only run remotely when use_remoteexec + # is enabled, and the environment is not nacl. + # TODO(b/259381924): Investigate enabling in nacl config. + if (allow_remote && use_remoteexec && !is_nacl) { + pool = "//build/toolchain:remote_action_pool($default_toolchain)" + script = "//build/util/action_remote.py" + inputs = [ invoker.script ] + + re_inputs = [ rebase_path(invoker.script, rbe_exec_root) ] + if (defined(invoker.inputs)) { + foreach(input, invoker.inputs) { + re_inputs += [ rebase_path(input, rbe_exec_root) ] + inputs += [ input ] + } + } + if (defined(invoker.sources)) { + foreach(source, invoker.sources) { + re_inputs += [ rebase_path(source, rbe_exec_root) ] + } + } + + re_outputs = [] + if (defined(invoker.outputs)) { + foreach(output, invoker.outputs) { + re_outputs += [ rebase_path(output, rbe_exec_root) ] + } + } + + # Write input/output lists to files as these can grow extremely large. + re_inputs_file = "$target_gen_dir/${target_name}__remote_inputs.rsp" + write_file(re_inputs_file, re_inputs) + inputs += [ re_inputs_file ] + re_outputs_file = "$target_gen_dir/${target_name}__remote_outputs.rsp" + write_file(re_outputs_file, re_outputs) + + args = [] + args += [ "$rbe_bin_dir/rewrapper" ] + if (defined(invoker.custom_processor)) { + args += [ "--custom_processor=" + invoker.custom_processor ] + } + + args += [ + "--cfg=$rbe_py_cfg_file", + "--exec_root=$rbe_exec_root", + "--input_list_paths=" + rebase_path(re_inputs_file, root_build_dir), + "--output_list_paths=" + rebase_path(re_outputs_file, root_build_dir), + "python3", + rebase_path(invoker.script, root_build_dir), + ] + + if (defined(invoker.args)) { + args += invoker.args + } + } else { + forward_variables_from(invoker, [ "inputs" ]) + not_needed(invoker, [ "custom_processor" ]) + } } - } else { - configs = default_compiler_configs } } diff --git a/build/config/OWNERS b/build/config/OWNERS index eeb67065c2f0..580fa2ef22ad 100644 --- a/build/config/OWNERS +++ b/build/config/OWNERS @@ -1,5 +1,4 @@ -dpranke@google.com -scottmg@chromium.org - per-file ozone.gni=file://ui/ozone/OWNERS per-file ozone_extra.gni=file://ui/ozone/OWNERS +per-file rust.gni=file://build/rust/OWNERS +per-file chromecast_build.gni=file://build/config/chromecast/OWNERS diff --git a/build/config/aix/BUILD.gn b/build/config/aix/BUILD.gn index 6c8749ab5c3a..6e55c83938fc 100644 --- a/build/config/aix/BUILD.gn +++ b/build/config/aix/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -28,6 +28,7 @@ config("compiler") { "-maix64", "-fdata-sections", "-ffunction-sections", + "-fno-extern-tls-init", "-O3", # "-Werror" @@ -46,4 +47,15 @@ config("compiler") { "-maix64", "-Wl,-bbigtoc", ] + + if (is_component_build) { + cflags += [ "-fpic" ] + ldflags += [ + "-Wl,-brtl", + + # -bnoipath so that only names of .so objects are stored in loader + # section, excluding leading "./" + "-Wl,-bnoipath", + ] + } } diff --git a/build/config/android/BUILD.gn b/build/config/android/BUILD.gn index 8eed45e80903..63b37e0c17c6 100644 --- a/build/config/android/BUILD.gn +++ b/build/config/android/BUILD.gn @@ -1,8 +1,8 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -import("//build/config/android/config.gni") +import("//build/config/android/rules.gni") import("//build/config/c++/c++.gni") import("//build/config/compiler/compiler.gni") import("//build/config/sanitizers/sanitizers.gni") @@ -33,32 +33,34 @@ config("compiler") { "ANDROID_NDK_VERSION_ROLL=${android_ndk_version}_1", ] - if (current_cpu == "mips64el") { - cflags += [ - # Have to force IAS for mips64. - "-fintegrated-as", - ] - } - ldflags = [ - # Don't allow visible symbols from libgcc or libc++ to be - # re-exported. - "-Wl,--exclude-libs=libgcc.a", - # Don't allow visible symbols from libraries that contain # assembly code with symbols that aren't hidden properly. # http://crbug.com/448386 "-Wl,--exclude-libs=libvpx_assembly_arm.a", ] - # TODO(crbug.com/1184398): Move to compiler-rt when we are ready. - ldflags += [ "--rtlib=libgcc" ] if (current_cpu == "arm64") { - # For outline atomics on AArch64 (can't pass this unconditionally - # due to unused flag warning on other targets). - cflags += [ "--rtlib=libgcc" ] + # Reduce the page size from 65536 in order to reduce binary size slightly + # by shrinking the alignment gap between segments. This also causes all + # segments to be mapped adjacently, which breakpad relies on. + ldflags += [ "-Wl,-z,max-page-size=4096" ] + } + + if (current_cpu == "arm64") { + if (arm_control_flow_integrity == "standard") { + cflags += [ "-mbranch-protection=standard" ] + rustflags = [ "-Zbranch-protection=bti" ] + } else if (arm_control_flow_integrity == "pac") { + cflags += [ "-mbranch-protection=pac-ret" ] + rustflags = [ "-Zbranch-protection=pac-ret" ] + } } + # Instead of using an unwind lib from the toolchain, + # buildtools/third_party/libunwind will be built and used directly. + ldflags += [ "--unwindlib=none" ] + # $compile_api_level corresponds to the API level used for the sysroot path # calculation in //build/config/android/config.gni if (android_64bit_target_cpu) { @@ -80,11 +82,8 @@ config("compiler") { # that is Android-only. Please see that target for advice on what should go in # :runtime_library vs. :compiler. config("runtime_library") { - # Let the linker find libgcc.a. - ldflags = [ "--gcc-toolchain=" + - rebase_path(android_toolchain_root, root_build_dir) ] - libs = [] + ldflags = [] # On 64-bit platforms, the only symbols provided by libandroid_support.a are # strto{d,f,l,ul}_l. These symbols are not used by our libc++, and newer NDKs @@ -94,24 +93,9 @@ config("runtime_library") { libs += [ "android_support" ] } - # arm builds of libc++ starting in NDK r12 depend on unwind. 
-  if (current_cpu == "arm") {
-    libs += [ "unwind" ]
-  }
 
   if (current_cpu == "arm" && arm_version == 6) {
     libs += [ "atomic" ]
   }
-
-  if (current_cpu == "mipsel") {
-    libs += [ "atomic" ]
-  }
-
-  # TODO(jdduke) Re-enable on mips after resolving linking
-  # issues with libc++ (crbug.com/456380).
-  if (current_cpu != "mipsel" && current_cpu != "mips64el") {
-    ldflags += [ "-Wl,--warn-shared-textrel" ]
-  }
 }
 
 config("hide_all_but_jni_onload") {
@@ -130,6 +114,27 @@ config("lld_pack_relocations") {
   ldflags = [ "-Wl,--pack-dyn-relocs=android" ]
 }
 
+config("lld_relr_relocations") {
+  # RELR supported API 30+, but supported 28+ with --use-android-relr-tags.
+  # https://android.googlesource.com/platform/bionic/+/master/android-changes-for-ndk-developers.md#relative-relocations-relr
+  ldflags = [ "-Wl,--pack-dyn-relocs=relr,--use-android-relr-tags" ]
+}
+
+config("lld_branch_target_hardening") {
+  # Config opts a shared library into BTI linker hardening. This
+  # is an opt-in config (rather than default-enabled) to avoid
+  # interfering with the V8 CFI bots (crbug.com/1334614).
+  if (current_cpu == "arm64") {
+    if (arm_control_flow_integrity == "standard") {
+      # Linking objects without GNU_PROPERTY_AARCH64_FEATURE_1_BTI
+      # in their .gnu.note section implicitly results in the final
+      # binary losing Branch Target Identification (BTI) support.
+      # Issue a warning if this happens.
+      ldflags = [ "-Wl,-z,force-bti" ]
+    }
+  }
+}
+
 # Used for instrumented build to generate the orderfile.
 config("default_orderfile_instrumentation") {
   if (use_order_profiling) {
@@ -142,12 +147,22 @@ config("default_orderfile_instrumentation") {
   }
 }
 
+config("jni_include_dir") {
+  include_dirs = [ jni_headers_dir ]
+}
+
 if (current_toolchain == default_toolchain) {
   pool("goma_javac_pool") {
     # Override action_pool when goma is enabled for javac.
     depth = 10000
   }
 
+  # nocompile tests share output directory to avoid them all needing to rebuild
+  # things. But this also means they can't run in parallel.
+  pool("nocompile_pool") {
+    depth = 1
+  }
+
   # When defined, this pool should be used instead of link_pool for command
   # that need 1-2GB of RAM. https://crbug.com/1078460
   if (defined(java_cmd_pool_size)) {
diff --git a/build/config/android/DIR_METADATA b/build/config/android/DIR_METADATA
new file mode 100644
index 000000000000..cdc2d6fb6eb6
--- /dev/null
+++ b/build/config/android/DIR_METADATA
@@ -0,0 +1 @@
+mixins: "//build/android/COMMON_METADATA"
diff --git a/build/config/android/abi.gni b/build/config/android/abi.gni
index 53e57013588d..e044ac6745b5 100644
--- a/build/config/android/abi.gni
+++ b/build/config/android/abi.gni
@@ -1,4 +1,4 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
+# Copyright 2017 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -29,6 +29,11 @@ declare_args() {
 
   # Build additional browser splits with HWASAN instrumentation enabled.
   build_hwasan_splits = false
+
+  # *For CQ purposes only* Leads to non-working APKs.
+  # Forces all APKs/bundles to be 64-bit only to improve build speed in the CQ
+  # (no need to also build 32-bit library).
+  skip_secondary_abi_for_cq = false
 }
 
 assert(!devtools_instrumentation_dumping || use_order_profiling,
@@ -64,11 +69,17 @@ if (current_cpu == "x86") {
 
   # Place holder for mips64 support, not tested.
   android_abi_target = "mips64el-linux-android"
+} else if (current_cpu == "riscv64") {
+  android_app_abi = "riscv64"
+
+  # Place holder for riscv64 support, not tested.
+ android_abi_target = "riscv64-linux-android" } else { assert(false, "Unknown Android ABI: " + current_cpu) } -if (target_cpu == "arm64" || target_cpu == "x64" || target_cpu == "mips64el") { +if (target_cpu == "arm64" || target_cpu == "x64" || target_cpu == "mips64el" || + target_cpu == "riscv64") { android_64bit_target_cpu = true } else if (target_cpu == "arm" || target_cpu == "x86" || target_cpu == "mipsel") { diff --git a/build/config/android/android_nocompile.gni b/build/config/android/android_nocompile.gni index a99bad3cc492..0b3f517bd976 100644 --- a/build/config/android/android_nocompile.gni +++ b/build/config/android/android_nocompile.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -34,10 +34,12 @@ template("android_nocompile_test_suite") { action(target_name) { testonly = true script = "//build/android/gyp/nocompile_test.py" + pool = "//build/config/android:nocompile_pool" _tests = invoker.tests _test0 = _tests[0] _test0_dir = get_label_info(_test0["target"], "dir") + _test0_target_out_dir = get_label_info(_test0["target"], "target_out_dir") foreach(_test_config, _tests) { assert( _test0_dir == get_label_info(_test_config["target"], "dir"), @@ -49,17 +51,28 @@ template("android_nocompile_test_suite") { deps += invoker.deps } - inputs = [] + sources = [] + if (defined(invoker.sources)) { + sources += invoker.sources + } + + # Depend on compile_java Python scripts so that the action is re-run whenever the script is + # modified. + _pydeps = [ "//build/android/gyp/compile_java.pydeps" ] if (defined(invoker.pydeps)) { - foreach(_pydeps_file, invoker.pydeps) { - _pydeps_file_lines = read_file(_pydeps_file, "list lines") - _pydeps_entries = filter_exclude(_pydeps_file_lines, [ "#*" ]) - _pydeps_file_dir = get_path_info(_pydeps_file, "dir") - inputs += rebase_path(_pydeps_entries, ".", _pydeps_file_dir) - } + _pydeps += invoker.pydeps + } + + inputs = [] + foreach(_pydeps_file, _pydeps) { + _pydeps_file_lines = [] + _pydeps_file_lines = read_file(_pydeps_file, "list lines") + _pydeps_entries = [] + _pydeps_entries = filter_exclude(_pydeps_file_lines, [ "#*" ]) + _pydeps_file_dir = get_path_info(_pydeps_file, "dir") + inputs += rebase_path(_pydeps_entries, ".", _pydeps_file_dir) } - sources = [] _json_test_configs = [] foreach(_test_config, _tests) { _test = _test_config["target"] @@ -78,13 +91,18 @@ template("android_nocompile_test_suite") { _config_path = "$target_gen_dir/${target_name}.nocompile_config" write_file(_config_path, _json_test_configs, "json") + # Compute output directory for no-compile tests based on the directory containing test + # targets instead of based on the test suite target name. This avoids calling 'gn gen' for each + # android_nocompile_test_suite() for test suites whose tests are declared in the same BUILD.gn + # file. 
+ _out_dir = "${_test0_target_out_dir}/nocompile_out" + _stamp_path = "${target_gen_dir}/${target_name}.stamp" args = [ "--gn-args-path", "args.gn", "--out-dir", - rebase_path("${target_out_dir}/${target_name}/nocompile_out", - root_build_dir), + rebase_path(_out_dir, root_build_dir), "--test-configs-path", rebase_path(_config_path, root_build_dir), "--stamp", diff --git a/build/config/android/build_vars.gni b/build/config/android/build_vars.gni index a47607dc7c4d..27866a7c1232 100644 --- a/build/config/android/build_vars.gni +++ b/build/config/android/build_vars.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -17,7 +17,9 @@ android_build_vars_json = { android_sdk_root = rebase_path(android_sdk_root, root_build_dir) android_sdk_version = android_sdk_version android_tool_prefix = rebase_path(android_tool_prefix, root_build_dir) + default_min_sdk_version = default_min_sdk_version final_android_sdk = final_android_sdk + public_android_sdk_version = public_android_sdk_version if (defined(android_secondary_abi_cpu)) { android_secondary_abi_toolchain = diff --git a/build/config/android/channel.gni b/build/config/android/channel.gni index 6348bb996e6f..0f8d45337d35 100644 --- a/build/config/android/channel.gni +++ b/build/config/android/channel.gni @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/android/config.gni b/build/config/android/config.gni index 8ffe59188aa3..5f4836726a3e 100644 --- a/build/config/android/config.gni +++ b/build/config/android/config.gni @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,6 +8,7 @@ # toolchain, this GNI file may be read and processed from within Chrome OS # toolchains. Checking |is_android| here would therefore be too restrictive. if (is_android || is_chromeos) { + import("//build/config/android/channel.gni") import("//build/config/chromecast_build.gni") import("//build/config/dcheck_always_on.gni") import("//build_overrides/build.gni") @@ -30,45 +31,50 @@ if (is_android || is_chromeos) { } } - has_chrome_android_internal = - exec_script("//build/dir_exists.py", - [ rebase_path("//clank", root_build_dir) ], - "string") == "True" - # We are using a separate declare_args block for only this argument so that # we can decide if we have to pull in definitions from the internal config # early. declare_args() { # Enables using the internal Chrome for Android repository. The default - # value depends on whether the repository is available, and if it's not but - # this argument is manually set to True, the generation will fail. - # The main purpose of this argument is to avoid having to maintain 2 - # repositories to support both public only and internal builds. - enable_chrome_android_internal = has_chrome_android_internal + # is set from gclient vars, with target_os needed for chromeos. + # Can be set to false to disable all internal android things. 
+    enable_chrome_android_internal =
+        build_with_chromium && checkout_src_internal && target_os == "android"
 
     # The default to use for android:minSdkVersion for targets that do
     # not explicitly set it.
     default_min_sdk_version = 24
 
-    # [WIP] Allows devs to achieve much faster edit-build-install cycles.
-    # Currently only works for ChromeModern apks due to incremental install.
-    # This needs to be in a separate declare_args as it determines some of the
-    # args in the main declare_args block below.
-    android_fast_local_dev = false
-  }
-
-  declare_args() {
-    # Android API level for 32 bits platforms
-    android32_ndk_api_level = default_min_sdk_version
-
-    # Android API level for 64 bits platforms
-    if (default_min_sdk_version < 24) {
-      android64_ndk_api_level = 24
+    # Static analysis can be either "on" or "off" or "build_server". This
+    # controls how android lint, error-prone, and bytecode checks are run.
+    # This needs to be in a separate declare_args as it determines some of
+    # the args in the main declare_args block below.
+    # "on" is the default.
+    # "off" avoids running any static analysis. This is the default for
+    # official builds to reduce build times. Failures in static analysis
+    # would have been caught by other bots.
+    # "build_server" ensures that fast_local_dev_server.py is running and
+    # offloads analysis tasks to it to be run after the build completes.
+    # This is the recommended setting for local development.
+    if (is_official_build) {
+      android_static_analysis = "off"
     } else {
-      android64_ndk_api_level = default_min_sdk_version
+      android_static_analysis = "on"
     }
+
+    # Build incremental targets whenever possible.
+    # See //build/android/incremental_install/README.md for more details.
+    incremental_install = false
   }
 
+  # Avoid typos when setting android_static_analysis in args.gn.
+  assert(android_static_analysis == "on" || android_static_analysis == "off" ||
+         android_static_analysis == "build_server")
+
+  # This configuration has no bot coverage and has broken multiple times.
+  # Warn against it.
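In args.gn terms, the android_static_analysis tri-state above means a local developer who wants analysis offloaded until after the build would typically set (illustrative values only; the assert above rejects anything else):

    # args.gn (illustrative)
    target_os = "android"
    android_static_analysis = "build_server"  # or "on" / "off"
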
+ assert(!(enable_chrome_android_internal && skip_secondary_abi_for_cq)) + if (enable_chrome_android_internal) { import("//clank/config.gni") } else { @@ -81,18 +87,28 @@ if (is_android || is_chromeos) { if (!defined(default_android_ndk_root)) { default_android_ndk_root = "//third_party/android_ndk" - default_android_ndk_version = "r25" - default_android_ndk_major_version = 25 + default_android_ndk_version = "r23" + default_android_ndk_major_version = 23 } else { assert(defined(default_android_ndk_version)) assert(defined(default_android_ndk_major_version)) } public_android_sdk_root = "//third_party/android_sdk/public" - if (android_sdk_release == "r") { + public_android_sdk_build_tools = + "${public_android_sdk_root}/build-tools/33.0.0" + public_android_sdk_version = "33" + if (android_sdk_release == "t") { + default_android_sdk_root = public_android_sdk_root + default_android_sdk_version = public_android_sdk_version + default_android_sdk_build_tools_version = "33.0.0" + public_android_sdk = true + } + + if (android_sdk_release == "tprivacysandbox") { default_android_sdk_root = public_android_sdk_root - default_android_sdk_version = "30" - default_android_sdk_build_tools_version = "30.0.1" + default_android_sdk_version = "TiramisuPrivacySandbox" + default_android_sdk_build_tools_version = "33.0.0" public_android_sdk = true } @@ -105,7 +121,7 @@ if (is_android || is_chromeos) { # Purposefully repeated so that downstream can change # default_android_sdk_root without changing lint version. default_lint_android_sdk_root = public_android_sdk_root - default_lint_android_sdk_version = 30 + default_lint_android_sdk_version = 33 } if (!defined(default_extras_android_sdk_root)) { @@ -124,31 +140,7 @@ if (is_android || is_chromeos) { # google_play_services_package contains the path where individual client # targets (e.g. google_play_services_base_java) are located. if (!defined(google_play_services_package)) { - if (is_chromecast && chromecast_branding != "public") { - google_play_services_package = "//chromecast/internal/android/prebuilt/google-play-services-first-party" - } else { - google_play_services_package = "//third_party/android_deps" - } - } - - if (!defined(dagger_java_target)) { - dagger_java_target = - "//third_party/android_deps:com_google_dagger_dagger_java" - } - - if (!defined(dagger_annotation_processor_target)) { - dagger_annotation_processor_target = - "//third_party/android_deps:com_google_dagger_dagger_compiler_java" - } - - if (!defined(guava_android_target)) { - guava_android_target = - "//third_party/android_deps:com_google_guava_guava_android_java" - } - - if (!defined(material_design_target)) { - material_design_target = - "//third_party/android_deps:com_google_android_material_material_java" + google_play_services_package = "//third_party/android_deps" } if (!defined(android_protoc_bin)) { @@ -171,6 +163,17 @@ if (is_android || is_chromeos) { android_ndk_version = default_android_ndk_version android_ndk_major_version = default_android_ndk_major_version + # Android API level for 32 bits platforms + android32_ndk_api_level = default_min_sdk_version + + # Android API level for 64 bits platforms + android64_ndk_api_level = default_min_sdk_version + + if (default_min_sdk_version < 21) { + # Android did not support 64 bit before API 21. 
+ android64_ndk_api_level = 21 + } + android_sdk_root = default_android_sdk_root android_sdk_version = default_android_sdk_version android_sdk_build_tools_version = default_android_sdk_build_tools_version @@ -204,30 +207,20 @@ if (is_android || is_chromeos) { # Java debug on Android. Having this on enables multidexing, and turning it # off will enable proguard. - is_java_debug = is_debug + is_java_debug = is_debug || incremental_install # Mark APKs as android:debuggable="true". debuggable_apks = !is_official_build # Set to false to disable the Errorprone compiler. - # Defaults to false for official builds to reduce build times. - # Static analysis failures should have been already caught by normal bots. - # Disabled when fast_local_dev is turned on. - use_errorprone_java_compiler = !is_official_build && !android_fast_local_dev - - # Build incremental targets whenever possible. - # See //build/android/incremental_install/README.md for more details. - incremental_install = android_fast_local_dev + use_errorprone_java_compiler = android_static_analysis != "off" # When true, updates all android_aar_prebuilt() .info files during gn gen. # Refer to android_aar_prebuilt() for more details. update_android_aar_prebuilts = false - # Turns off android lint. Useful for prototyping or for faster local builds. - # Defaults to true for official builds to reduce build times. - # Static analysis failures should have been already caught by normal bots. - # Disabled when fast_local_dev is turned on. - disable_android_lint = is_official_build || android_fast_local_dev + # Turns off android lint. + disable_android_lint = android_static_analysis == "off" # Location of aapt2 used for app bundles. For now, a more recent version # than the one distributed with the Android SDK is required. @@ -247,8 +240,23 @@ if (is_android || is_chromeos) { # support mapping these names. enable_arsc_obfuscation = true + # Controls whether |strip_unused_resources| is respected. Useful when trying + # to analyze APKs using tools that do not support missing resources from + # resources.arsc. + enable_unused_resource_stripping = true + + # Controls whether |baseline_profile_path| is respected. Useful to disable + # baseline profiles. + # Currently disabled while bundletool does not support baseline profiles in + # non-base splits. + enable_baseline_profiles = false + # The target to use as the system WebView implementation. - system_webview_apk_target = "//android_webview:system_webview_apk" + if (android_64bit_target_cpu && skip_secondary_abi_for_cq) { + system_webview_apk_target = "//android_webview:system_webview_64_apk" + } else { + system_webview_apk_target = "//android_webview:system_webview_apk" + } # Where to write failed expectations for bots to read. expectations_failure_dir = "$root_build_dir/failed_expectations" @@ -264,7 +272,7 @@ if (is_android || is_chromeos) { } # Whether java assertions and Preconditions checks are enabled. - enable_java_asserts = is_java_debug || dcheck_always_on + enable_java_asserts = dcheck_always_on || !is_official_build # Reduce build time by using d8 incremental build. enable_incremental_d8 = true @@ -272,15 +280,19 @@ if (is_android || is_chromeos) { # Use hashed symbol names to reduce JNI symbol overhead. use_hashed_jni_names = !is_java_debug - # Desugar lambdas and interfaces methods using Desugar.jar rather than - # D8/R8. D8/R8 will still be used for backported method desugaring. - enable_bazel_desugar = true + # Enables JNI multiplexing to reduce JNI native methods overhead. 
+ allow_jni_multiplexing = false - # Enables Java library desugaring. - # This will cause an extra classes.dex file to appear in every apk. - enable_jdk_library_desugaring = true + # Enables trace event injection on Android views with bytecode rewriting. + # This adds an additional step on android_app_bundle_module targets that + # adds trace events to some methods in android.view.View subclasses. + enable_trace_event_bytecode_rewriting = + !is_java_debug && android_channel != "stable" } + assert(!incremental_install || is_java_debug, + "incremental_install=true && is_java_debug=false is not supported.") + # Host stuff ----------------------------------------------------------------- # Defines the name the Android build gives to the current host CPU @@ -322,32 +334,33 @@ if (is_android || is_chromeos) { # like the toolchain roots. if (current_cpu == "x86") { android_prebuilt_arch = "android-x86" - _binary_prefix = "i686-linux-android" } else if (current_cpu == "arm") { android_prebuilt_arch = "android-arm" - _binary_prefix = "arm-linux-androideabi" } else if (current_cpu == "mipsel") { android_prebuilt_arch = "android-mips" - _binary_prefix = "mipsel-linux-android" } else if (current_cpu == "x64") { android_prebuilt_arch = "android-x86_64" - _binary_prefix = "x86_64-linux-android" } else if (current_cpu == "arm64") { android_prebuilt_arch = "android-arm64" - _binary_prefix = "aarch64-linux-android" } else if (current_cpu == "mips64el") { android_prebuilt_arch = "android-mips64" - _binary_prefix = "mips64el-linux-android" + } else if (current_cpu == "riscv64") { + # Place holder for riscv64 support, not tested. + android_prebuilt_arch = "android-riscv64" } else { assert(false, "Need android libgcc support for your target arch.") } android_toolchain_root = "$android_ndk_root/toolchains/llvm/prebuilt/${android_host_os}-${android_host_arch}" - android_tool_prefix = "$android_toolchain_root/bin/$_binary_prefix-" - android_readelf = "${android_tool_prefix}readelf" + android_ndk_library_path = "$android_toolchain_root/lib64" + android_tool_prefix = "$android_toolchain_root/bin/llvm-" + android_readelf = "${android_tool_prefix}readobj" android_objcopy = "${android_tool_prefix}objcopy" android_gdbserver = "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver" android_sdk_tools_bundle_aapt2 = "${android_sdk_tools_bundle_aapt2_dir}/aapt2" + + # Toolchain used to create native libraries for robolectric_binary() targets. + robolectric_toolchain = "//build/toolchain/android:robolectric_$host_cpu" } diff --git a/build/config/android/copy_ex.gni b/build/config/android/copy_ex.gni index d3705dd7ef4a..8e70c3014399 100644 --- a/build/config/android/copy_ex.gni +++ b/build/config/android/copy_ex.gni @@ -1,4 +1,4 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # diff --git a/build/config/android/create_unwind_table.gni b/build/config/android/create_unwind_table.gni new file mode 100644 index 000000000000..92b7427c47c5 --- /dev/null +++ b/build/config/android/create_unwind_table.gni @@ -0,0 +1,50 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
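The new create_unwind_table.gni that follows defines unwind_table_v2(); a hypothetical invocation, given some shared_library() target, might look like:

    unwind_table_v2("libexample_unwind_v2") {
      # Points at the shared_library() whose unstripped .so should be
      # processed; the target name here is invented.
      library_target = ":libexample"
    }
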
+ +import("//build/config/android/rules.gni") + +unwind_table_asset_v2_filename = "unwind_cfi_32_v2" + +_dump_syms_target = "//third_party/breakpad:dump_syms($host_toolchain)" +_dump_syms = get_label_info(_dump_syms_target, "root_out_dir") + "/dump_syms" +_readobj_path = "$clang_base_path/bin/llvm-readobj" + +template("unwind_table_v2") { + action(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + _output_path = + "$target_out_dir/$target_name/$unwind_table_asset_v2_filename" + + # Strip the "lib" prefix, if present. Add and then remove a space because + # our ownly tool is "replace all". + _library_name = get_label_info(invoker.library_target, "name") + _library_name = + string_replace(string_replace(" $_library_name", " $shlib_prefix", ""), + " ", + "") + _library_path = "$root_out_dir/lib.unstripped/$shlib_prefix$_library_name$shlib_extension" + + script = "//build/android/gyp/create_unwind_table.py" + outputs = [ _output_path ] + inputs = [ + _dump_syms, + _library_path, + ] + deps = [ + _dump_syms_target, + invoker.library_target, + ] + + args = [ + "--input_path", + rebase_path(_library_path, root_build_dir), + "--output_path", + rebase_path(_output_path, root_build_dir), + "--dump_syms_path", + rebase_path(_dump_syms, root_build_dir), + "--readobj_path", + rebase_path(_readobj_path, root_build_dir), + ] + } +} diff --git a/build/config/android/extract_unwind_tables.gni b/build/config/android/extract_unwind_tables.gni index 5444c5b97217..d4daa6a2aadc 100644 --- a/build/config/android/extract_unwind_tables.gni +++ b/build/config/android/extract_unwind_tables.gni @@ -1,44 +1,47 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/android/rules.gni") -template("unwind_table_asset") { - # Note: This file name is used in multiple monochrome build scripts. - _asset_path = "${target_gen_dir}/${target_name}/unwind_cfi_32" - _unwind_action = "${target_name}__extract" +unwind_table_asset_v1_filename = "unwind_cfi_32" - action(_unwind_action) { +_dump_syms_target = "//third_party/breakpad:dump_syms($host_toolchain)" +_dump_syms = get_label_info(_dump_syms_target, "root_out_dir") + "/dump_syms" + +template("unwind_table_v1") { + action(target_name) { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + _output_path = + "$target_out_dir/$target_name/$unwind_table_asset_v1_filename" - _root_dir = "$root_out_dir" - if (defined(android_secondary_abi_cpu)) { - _root_dir = get_label_info(":foo($android_secondary_abi_toolchain)", - "root_out_dir") - } + # Strip the "lib" prefix, if present. Add and then remove a space because + # our ownly tool is "replace all". 
+ _library_name = get_label_info(invoker.library_target, "name") + _library_name = + string_replace(string_replace(" $_library_name", " $shlib_prefix", ""), + " ", + "") + _library_path = "$root_out_dir/lib.unstripped/$shlib_prefix$_library_name$shlib_extension" script = "//build/android/gyp/extract_unwind_tables.py" - outputs = [ _asset_path ] - inputs = [ "${_root_dir}/lib.unstripped/$shlib_prefix${invoker.library_target}$shlib_extension" ] + outputs = [ _output_path ] + inputs = [ + _dump_syms, + _library_path, + ] + deps = [ + _dump_syms_target, + invoker.library_target, + ] args = [ "--input_path", - rebase_path( - "${_root_dir}/lib.unstripped/$shlib_prefix${invoker.library_target}$shlib_extension", - root_build_dir), + rebase_path(_library_path, root_build_dir), "--output_path", - rebase_path(_asset_path, root_build_dir), + rebase_path(_output_path, root_build_dir), "--dump_syms_path", - rebase_path("$root_out_dir/dump_syms", root_build_dir), + rebase_path(_dump_syms, root_build_dir), ] - deps = invoker.deps - deps += [ "//third_party/breakpad:dump_syms" ] - } - android_assets(target_name) { - forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) - sources = [ _asset_path ] - disable_compression = true - deps = [ ":$_unwind_action" ] } } diff --git a/build/config/android/internal_rules.gni b/build/config/android/internal_rules.gni index f1d1faa7be28..427fa0dca401 100644 --- a/build/config/android/internal_rules.gni +++ b/build/config/android/internal_rules.gni @@ -1,11 +1,11 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # Do not add any imports to non-//build directories here. # Some projects (e.g. V8) do not have non-build directories DEPS'ed in. +import("//build/config/android/channel.gni") import("//build/config/android/config.gni") -import("//build/config/android/copy_ex.gni") import("//build/config/compiler/compiler.gni") import("//build/config/compute_inputs_for_analyze.gni") import("//build/config/coverage/coverage.gni") @@ -15,11 +15,20 @@ import("//build/toolchain/goma.gni") import("//build/toolchain/kythe.gni") import("//build/util/generate_wrapper.gni") import("//build_overrides/build.gni") -if (is_starboardized_toolchain) { +if (is_starboardized_toolchain || current_toolchain == default_toolchain) { import("//build/toolchain/concurrent_links.gni") } assert(is_android) +default_android_sdk_dep = "//third_party/android_sdk:android_sdk_java" +_jacoco_dep = "//third_party/jacoco:jacocoagent_java" +_jacoco_host_jar = + "$root_build_dir/lib.java/third_party/jacoco/jacocoagent_java.jar" +_robolectric_libs_dir = + rebase_path( + get_label_info("//:foo($robolectric_toolchain)", "root_out_dir"), + root_build_dir) + # The following _java_*_types variables capture all the existing target types. # If a new type is introduced, please add it to one of these categories, # preferring the more specific resource/library types. @@ -43,102 +52,64 @@ _java_leaf_types = [ "dist_jar", "java_annotation_processor", "java_binary", - "junit_binary", + "robolectric_binary", ] # All _java_resource_types targets must conform to these patterns. 
-_java_resource_patterns = [ - "*:*_assets", +java_resource_patterns = [ + "*_assets", + "*_grd", + "*_java_strings", + "*locale_paks", + "*_resources", + "*strings_java", "*android*:assets", "*:*_apk_*resources", "*android*:resources", - "*:*_resources", - "*:*_grd", - "*:*locale_paks", - "*:*_java_strings", - "*:*strings_java", ] # All _java_library_types targets must conform to these patterns. This includes # all non-leaf targets that use java_library_impl. -_java_library_patterns = [ - "*:*_java", - "*:*_javalib", - "*:*_java_*", # e.g. chrome_java_test_support +java_library_patterns = [ + "*_java", + "*_javalib", + "*javatests", + "*_bundle_module", + "*:*_java_*", # E.g. chrome_java_test_support "*:java", + "*/java", # to allow filtering without expanding labels //a/java -> + # //a/java:java "*:junit", + "*/junit", "*:junit_*", "*:*_junit_*", - "*:*javatests", - "*:*_bundle_module", - # TODO(agrieve): Rename targets below to match above patterns. + # TODO(agrieve): Rename to glue_java + "//android_webview/glue", "//android_webview/glue:glue", ] -# These identify all non-leaf targets that have .build_config files. This is the +# These identify all non-leaf targets that have .build_config.json files. This is the # set of patterns that other targets can use to filter out java targets. -java_target_patterns = _java_library_patterns + _java_resource_patterns +java_target_patterns = java_library_patterns + java_resource_patterns _r8_path = "//third_party/r8/lib/r8.jar" + +# This duplication is intentional, so we avoid updating the r8.jar used by +# dexing unless necessary, since each update invalidates all incremental dexing +# and unnecessarily slows down all bots. +_d8_path = "//third_party/r8/d8/lib/r8.jar" _custom_d8_path = "//third_party/r8/custom_d8.jar" -_desugar_jdk_libs_json = "//third_party/r8/desugar_jdk_libs.json" -_desugar_jdk_libs_jar = "//third_party/android_deps/libs/com_android_tools_desugar_jdk_libs/desugar_jdk_libs-1.1.1.jar" -_desugar_jdk_libs_configuration_jar = "//third_party/android_deps/libs/com_android_tools_desugar_jdk_libs_configuration/desugar_jdk_libs_configuration-1.1.1.jar" -_desugar_runtime_jar = "$root_build_dir/obj/third_party/bazel/desugar/Desugar_runtime.processed.jar" - -_dexdump_path = "$android_sdk_build_tools/dexdump" -_dexlayout_path = "//third_party/android_build_tools/art/dexlayout" -_profman_path = "//third_party/android_build_tools/art/profman" -_art_lib_file_names = [ - "libartbase.so", - "libart-compiler.so", - "libart-dexlayout.so", - "libart-disassembler.so", - "libart-gtest.so", - "libart.so", - "libbacktrace.so", - "libbase.so", - "libcrypto-host.so", - "libc++.so", - "libcutils.so", - "libdexfile.so", - "libexpat-host.so", - "libicui18n-host.so", - "libicuuc-host.so", - "libjavacore.so", - "libjavacrypto.so", - "liblog.so", - "liblz4.so", - "liblzma.so", - "libnativebridge.so", - "libnativehelper.so", - "libnativeloader.so", - "libopenjdkjvm.so", - "libopenjdkjvmti.so", - "libopenjdk.so", - "libprofile.so", - "libsigchain.so", - "libssl-host.so", - "libunwindstack.so", - "libvixl-arm64.so", - "libvixl-arm.so", - "libvixld-arm64.so", - "libvixld-arm.so", - "libz-host.so", - "libziparchive.so", - "slicer.so", -] -_default_art_libs = [] -foreach(lib, _art_lib_file_names) { - _default_art_libs += [ "//third_party/android_build_tools/art/lib/$lib" ] -} +_default_lint_jar_path = "//third_party/android_build_tools/lint/lint.jar" +_custom_lint_jar_path = "//third_party/android_build_tools/lint/custom_lint.jar" +_manifest_merger_jar_path = + 
"//third_party/android_build_tools/manifest_merger/manifest-merger.jar" # Put the bug number in the target name so that false-positives have a hint in # the error message about why non-existent dependencies are there. build_config_target_suffix = "__build_config_crbug_908819" -# Write the target's .build_config file. This is a json file that contains a +# Write the target's .build_config.json file. This is a json file that contains a # dictionary of information about how to build this target (things that # require knowledge about this target's dependencies and cannot be calculated # at gn-time). There is a special syntax to add a value in that dictionary to @@ -153,16 +124,15 @@ template("write_build_config") { _target_label = get_label_info(":${_parent_invoker.target_name}", "label_no_toolchain") - # Ensure targets match naming patterns so that __assetres, __header, __impl - # targets work properly. Those generated targets allow for effective deps - # filtering. + # Ensure targets match naming patterns so that __assetres, __header, __host, + # and __validate targets work properly. if (filter_exclude([ _type ], _java_resource_types) == []) { - if (filter_exclude([ _target_label ], _java_resource_patterns) != []) { + if (filter_exclude([ _target_label ], java_resource_patterns) != []) { assert(false, "Invalid java resource target name: $_target_label") } } else if (filter_exclude([ _type ], _java_library_types) == []) { - if (filter_exclude([ _target_label ], _java_library_patterns) != [] || - filter_exclude([ _target_label ], _java_resource_patterns) == []) { + if (filter_exclude([ _target_label ], java_library_patterns) != [] || + filter_exclude([ _target_label ], java_resource_patterns) == []) { assert(false, "Invalid java library target name: $_target_label") } } else if (_type == "group") { @@ -196,28 +166,45 @@ template("write_build_config") { outputs = [ invoker.build_config ] _deps_configs = [] - _public_deps_configs = [] if (defined(invoker.possible_config_deps)) { foreach(_possible_dep, invoker.possible_config_deps) { _dep_label = get_label_info(_possible_dep, "label_no_toolchain") if (filter_exclude([ _dep_label ], java_target_patterns) == []) { - # Put the bug number in the target name so that false-positives - # have a hint in the error message about non-existent dependencies. deps += [ "$_dep_label$build_config_target_suffix" ] _dep_gen_dir = get_label_info(_possible_dep, "target_gen_dir") _dep_name = get_label_info(_possible_dep, "name") - _dep_config = "$_dep_gen_dir/$_dep_name.build_config" + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" _deps_configs += [ _dep_config ] - if (defined(invoker.possible_config_public_deps)) { - if (filter_include([ _possible_dep ], - invoker.possible_config_public_deps) != []) { - _public_deps_configs += [ _dep_config ] - } - } } } } + _public_deps_configs = [] + if (defined(invoker.possible_config_public_deps)) { + foreach(_possible_dep, invoker.possible_config_public_deps) { + _dep_label = get_label_info(_possible_dep, "label_no_toolchain") + + # E.g. Adding an action that generates a .java file that is then + # consumed by a subsequent java_library() target would not work + # because the libraries depend only on the nested targets of one + # another. It is simplest to just ban non-java public_deps. + assert(filter_exclude([ _dep_label ], java_target_patterns) == [], + "Only java_library targets can be used as public_deps. 
" + + "Found:\n${_dep_label}\non Target:\n" + + get_label_info(":$target_name", "label_no_toolchain")) + + # Put the bug number in the target name so that false-positives + # have a hint in the error message about non-existent dependencies. + deps += [ "$_dep_label$build_config_target_suffix" ] + _dep_gen_dir = get_label_info(_possible_dep, "target_gen_dir") + _dep_name = get_label_info(_possible_dep, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + + _public_deps_configs += [ _dep_config ] + } + } + inputs += _deps_configs + inputs += _public_deps_configs _rebased_deps_configs = rebase_path(_deps_configs, root_build_dir) _rebased_public_deps_configs = rebase_path(_public_deps_configs, root_build_dir) @@ -234,9 +221,8 @@ template("write_build_config") { _target_label, ] - if (defined(invoker.ignore_dependency_public_deps) && - invoker.ignore_dependency_public_deps) { - args += [ "--ignore-dependency-public-deps" ] + if (defined(invoker.preferred_dep) && invoker.preferred_dep) { + args += [ "--preferred-dep" ] } if (defined(invoker.aar_path)) { @@ -275,6 +261,12 @@ template("write_build_config") { rebase_path(invoker.ijar_path, root_build_dir), ] } + if (defined(invoker.kotlinc_jar_path)) { + args += [ + "--kotlinc-jar-path", + rebase_path(invoker.kotlinc_jar_path, root_build_dir), + ] + } if (defined(invoker.java_resources_jar)) { args += [ "--java-resources-jar-path", @@ -284,15 +276,16 @@ template("write_build_config") { if (defined(invoker.annotation_processor_deps) && invoker.annotation_processor_deps != []) { _processor_configs = [] - foreach(_processor_dep, invoker.annotation_processor_deps) { - _dep_label = get_label_info(_processor_dep, "label_no_toolchain") - _dep_gen_dir = get_label_info(_processor_dep, "target_gen_dir") - _dep_name = get_label_info(_processor_dep, "name") + foreach(_dep_label, invoker.annotation_processor_deps) { deps += [ "$_dep_label$build_config_target_suffix" ] - _processor_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ] + _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir") + _dep_name = get_label_info(_dep_label, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + _processor_configs += [ _dep_config ] } _rebased_processor_configs = rebase_path(_processor_configs, root_build_dir) + inputs += _processor_configs args += [ "--annotation-processor-configs=$_rebased_processor_configs" ] } @@ -324,17 +317,20 @@ template("write_build_config") { invoker.bypass_platform_checks) { args += [ "--bypass-platform-checks" ] } + if (defined(invoker.is_robolectric) && invoker.is_robolectric) { + args += [ "--is-robolectric" ] + } if (defined(invoker.apk_under_test)) { - deps += [ "${invoker.apk_under_test}$build_config_target_suffix" ] - apk_under_test_gen_dir = - get_label_info(invoker.apk_under_test, "target_gen_dir") - apk_under_test_name = get_label_info(invoker.apk_under_test, "name") - apk_under_test_config = - "$apk_under_test_gen_dir/$apk_under_test_name.build_config" + _dep_label = invoker.apk_under_test + _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir") + _dep_name = get_label_info(_dep_label, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + inputs += [ _dep_config ] + deps += [ "$_dep_label$build_config_target_suffix" ] args += [ "--tested-apk-config", - rebase_path(apk_under_test_config, root_build_dir), + rebase_path(_dep_config, root_build_dir), ] } @@ -360,6 +356,12 @@ template("write_build_config") { args += [ "--treat-as-locale-paks" ] } + if 
(defined(invoker.merged_android_manifest)) { + args += [ + "--merged-android-manifest", + rebase_path(invoker.merged_android_manifest, root_build_dir), + ] + } if (defined(invoker.android_manifest)) { inputs += [ invoker.android_manifest ] args += [ @@ -432,10 +434,6 @@ template("write_build_config") { ] } - if (defined(invoker.is_base_module) && invoker.is_base_module) { - args += [ "--is-base-module" ] - } - if (defined(invoker.loadable_modules)) { _rebased_loadable_modules = rebase_path(invoker.loadable_modules, root_build_dir) @@ -470,19 +468,10 @@ template("write_build_config") { args += [ "--secondary-native-lib-placeholders=${invoker.secondary_native_lib_placeholders}" ] } - if (defined(invoker.uncompress_shared_libraries) && - invoker.uncompress_shared_libraries) { - args += [ "--uncompress-shared-libraries" ] - } - if (defined(invoker.library_always_compress)) { args += [ "--library-always-compress=${invoker.library_always_compress}" ] } - if (defined(invoker.library_renames)) { - args += [ "--library-renames=${invoker.library_renames}" ] - } - if (defined(invoker.apk_path)) { # TODO(tiborg): Remove APK path from build config and use # install_artifacts from metadata instead. @@ -500,10 +489,10 @@ template("write_build_config") { } } - if (defined(invoker.java_sources_file)) { + if (defined(invoker.target_sources_file)) { args += [ - "--java-sources-file", - rebase_path(invoker.java_sources_file, root_build_dir), + "--target-sources-file", + rebase_path(invoker.target_sources_file, root_build_dir), ] } if (defined(invoker.srcjar)) { @@ -546,20 +535,6 @@ template("write_build_config") { rebase_path(invoker.proguard_configs, root_build_dir) args += [ "--proguard-configs=$_rebased_proguard_configs" ] } - if (defined(invoker.static_library_dependent_targets)) { - _dependent_configs = [] - foreach(_dep, invoker.static_library_dependent_targets) { - _dep_name = _dep.name - _dep_label = get_label_info(_dep_name, "label_no_toolchain") - deps += [ "$_dep_label$build_config_target_suffix" ] - _dep_gen_dir = get_label_info(_dep_name, "target_gen_dir") - _dep_name = get_label_info(_dep_name, "name") - _config = - rebase_path("$_dep_gen_dir/$_dep_name.build_config", root_build_dir) - _dependent_configs += [ _config ] - } - args += [ "--static-library-dependent-configs=$_dependent_configs" ] - } if (defined(invoker.gradle_treat_as_prebuilt) && invoker.gradle_treat_as_prebuilt) { args += [ "--gradle-treat-as-prebuilt" ] @@ -571,24 +546,76 @@ template("write_build_config") { ] } if (defined(invoker.base_module_target)) { - _base_label = - get_label_info(invoker.base_module_target, "label_no_toolchain") - _dep_gen_dir = get_label_info(_base_label, "target_gen_dir") - _dep_name = get_label_info(_base_label, "name") - deps += [ "$_base_label$build_config_target_suffix" ] - _base_module_build_config = "$_dep_gen_dir/$_dep_name.build_config" - inputs += [ _base_module_build_config ] + _dep_label = invoker.base_module_target + _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir") + _dep_name = get_label_info(_dep_label, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + deps += [ "$_dep_label$build_config_target_suffix" ] + inputs += [ _dep_config ] args += [ "--base-module-build-config", - rebase_path(_base_module_build_config, root_build_dir), + rebase_path(_dep_config, root_build_dir), + ] + } + if (defined(invoker.parent_module_target)) { + _dep_label = invoker.parent_module_target + _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir") + _dep_name = 
get_label_info(_dep_label, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + deps += [ "$_dep_label$build_config_target_suffix" ] + inputs += [ _dep_config ] + args += [ + "--parent-module-build-config", + rebase_path(_dep_config, root_build_dir), + ] + } + if (defined(invoker.module_name)) { + args += [ + "--module-name", + invoker.module_name, ] } + if (defined(invoker.modules)) { + foreach(_module, invoker.modules) { + if (defined(_module.uses_split)) { + args += [ "--uses-split=${_module.name}:${_module.uses_split}" ] + } + } + } if (defined(invoker.module_build_configs)) { inputs += invoker.module_build_configs _rebased_configs = rebase_path(invoker.module_build_configs, root_build_dir) args += [ "--module-build-configs=$_rebased_configs" ] } + if (defined(invoker.add_view_trace_events) && + invoker.add_view_trace_events) { + # Adding trace events involves rewriting bytecode and generating a new set + # of jar files. In order to avoid conflicts between bundles we save the + # new jars in a bundle specific gen/ directory. The build config for the + # bundle, and each one of its modules need a path to a bundle specific + # gen/ directory in order to generate a list of rewritten jar paths. + # We use the base module's target_gen_dir because non-base modules and the + # app bundle targets have a reference to it (base_module_target). + if (_type == "android_app_bundle") { + _trace_events_target_name = + get_label_info(_parent_invoker.base_module_target, "name") + } else if (defined(invoker.base_module_target)) { + _trace_events_target_name = + get_label_info(invoker.base_module_target, "name") + } else { + _grandparent_invoker = _parent_invoker.invoker + _trace_events_target_name = _grandparent_invoker.target_name + } + + # FIXME: This should likely be using the base module's target_out_dir + # rather than the current target's. + args += [ + "--trace-events-jar-dir", + rebase_path("$target_out_dir/$_trace_events_target_name", + root_build_dir), + ] + } if (defined(invoker.version_name)) { args += [ "--version-name", @@ -644,22 +671,6 @@ template("generate_android_wrapper") { template("generate_r_java") { action_with_pydeps(target_name) { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) - if (!defined(deps)) { - deps = [] - } - if (defined(invoker.possible_resource_deps)) { - foreach(_dep, invoker.possible_resource_deps) { - _target_label = get_label_info(_dep, "label_no_toolchain") - if (filter_exclude([ _target_label ], _java_library_patterns) == [] && - filter_exclude([ _target_label ], _java_resource_patterns) != []) { - # Depend on the java libraries' transitive __assetres target instead. - # This is required to ensure depending on java_groups works. 
- deps += [ "${_target_label}__assetres" ] - } else { - deps += [ _dep ] - } - } - } depfile = "$target_gen_dir/${invoker.target_name}.d" inputs = [ invoker.build_config ] outputs = [ invoker.srcjar_path ] @@ -682,6 +693,7 @@ template("test_runner_script") { testonly = true _test_name = invoker.test_name _test_type = invoker.test_type + _is_unit_test = defined(invoker.is_unit_test) && invoker.is_unit_test _incremental_apk = defined(invoker.incremental_apk) && invoker.incremental_apk _runtime_deps = @@ -715,7 +727,8 @@ template("test_runner_script") { if (defined(invoker.apk_under_test)) { _install_artifacts_json = "${target_gen_dir}/${target_name}.install_artifacts" - generated_file("${target_name}__install_artifacts") { + _install_artifacts_target_name = "${target_name}__install_artifacts" + generated_file(_install_artifacts_target_name) { deps = [ invoker.apk_under_test ] output_conversion = "json" outputs = [ _install_artifacts_json ] @@ -726,12 +739,18 @@ template("test_runner_script") { } generate_android_wrapper(target_name) { + forward_variables_from(invoker, + [ + "assert_no_deps", + "public_deps", + "visibility", + ]) wrapper_script = "$root_build_dir/bin/run_${_test_name}" executable = "//testing/test_env.py" - if (defined(android_test_runner_script)) { - _runner_script = android_test_runner_script + if (defined(invoker.android_test_runner_script)) { + _runner_script = invoker.android_test_runner_script } else { _runner_script = "//build/android/test_runner.py" } @@ -741,9 +760,12 @@ template("test_runner_script") { deps = invoker.deps } data_deps = [ - "//build/android:test_runner_py", + "//build/android:test_runner_core_py", "//testing:test_scripts_shared", ] + if (_test_type != "junit") { + data_deps += [ "//build/android:test_runner_device_support" ] + } if (defined(invoker.data_deps)) { data_deps += invoker.data_deps } @@ -757,8 +779,13 @@ template("test_runner_script") { _test_type, "--output-directory", "@WrappedPath(.)", + "--wrapper-script-args", ] + if (_is_unit_test) { + executable_args += [ "--is-unit-test" ] + } + if (_runtime_deps) { deps += [ ":$_runtime_deps_target" ] data += [ _runtime_deps_file ] @@ -777,7 +804,7 @@ template("test_runner_script") { deps += [ "${invoker.apk_target}$build_config_target_suffix" ] _apk_build_config = get_label_info(invoker.apk_target, "target_gen_dir") + "/" + - get_label_info(invoker.apk_target, "name") + ".build_config" + get_label_info(invoker.apk_target, "name") + ".build_config.json" _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir) not_needed([ "_rebased_apk_build_config" ]) } else if (_test_type == "gtest") { @@ -814,24 +841,22 @@ template("test_runner_script") { if (_incremental_apk) { _test_apk = "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:incremental_apk_path))" } - _rebased_test_jar = rebase_path(invoker.test_jar, root_build_dir) executable_args += [ "--test-apk", _test_apk, - "--test-jar", - "@WrappedPath(${_rebased_test_jar})", ] if (defined(invoker.apk_under_test)) { if (_incremental_apk) { deps += [ "${invoker.apk_under_test}$build_config_target_suffix" ] _apk_under_test_build_config = get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" + - get_label_info(invoker.apk_under_test, "name") + ".build_config" + get_label_info(invoker.apk_under_test, "name") + + ".build_config.json" _rebased_apk_under_test_build_config = rebase_path(_apk_under_test_build_config, root_build_dir) _apk_under_test = 
"@WrappedPath(@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_apk_path))" } else { - deps += [ ":${target_name}__install_artifacts" ] + deps += [ ":${_install_artifacts_target_name}" ] _rebased_install_artifacts_json = rebase_path(_install_artifacts_json, root_build_dir) _apk_under_test = @@ -847,16 +872,25 @@ template("test_runner_script") { _build_config = get_label_info(invoker.use_webview_provider, "target_gen_dir") + "/" + get_label_info(invoker.use_webview_provider, "name") + - ".build_config" + ".build_config.json" _rebased_build_config = rebase_path(_build_config, root_build_dir) executable_args += [ "--use-webview-provider", "@WrappedPath(@FileArg($_rebased_build_config:deps_info:apk_path))", ] } - if (defined(invoker.proguard_enabled) && invoker.proguard_enabled && - !_incremental_apk) { - executable_args += [ "--enable-java-deobfuscation" ] + if (defined(invoker.proguard_mapping_path)) { + if (_incremental_apk) { + not_needed(invoker, [ "proguard_mapping_path" ]) + } else { + data += [ invoker.proguard_mapping_path ] + _rebased_mapping_path = + rebase_path(invoker.proguard_mapping_path, root_build_dir) + executable_args += [ + "--proguard-mapping-path", + "@WrappedPath($_rebased_mapping_path)", + ] + } } if (use_jacoco_coverage) { # Set a default coverage output directory (can be overridden by user @@ -874,15 +908,17 @@ template("test_runner_script") { executable_args += [ "--test-suite", invoker.test_suite, + "--native-libs-dir", + "@WrappedPath($_robolectric_libs_dir)", ] + # Test runner uses this generated wrapper script. + data += [ "$root_build_dir/bin/helper/${invoker.test_suite}" ] + deps += [ ":${invoker.test_suite}$build_config_target_suffix" ] - _junit_binary_build_config = - "${target_gen_dir}/${invoker.test_suite}.build_config" _rebased_robolectric_runtime_deps_dir = - rebase_path("$root_build_dir/lib.java/third_party/robolectric", - root_build_dir) + rebase_path("//third_party/robolectric/lib", root_build_dir) _rebased_resource_apk = rebase_path(invoker.resource_apk, root_build_dir) executable_args += [ "--resource-apk", @@ -912,8 +948,9 @@ template("test_runner_script") { if (defined(invoker.additional_apks)) { foreach(additional_apk, invoker.additional_apks) { deps += [ "$additional_apk$build_config_target_suffix" ] - _build_config = get_label_info(additional_apk, "target_gen_dir") + "/" + - get_label_info(additional_apk, "name") + ".build_config" + _build_config = + get_label_info(additional_apk, "target_gen_dir") + "/" + + get_label_info(additional_apk, "name") + ".build_config.json" _rebased_build_config = rebase_path(_build_config, root_build_dir) executable_args += [ "--additional-apk", @@ -978,9 +1015,6 @@ if (enable_java_templates) { template("android_lint") { action_with_pydeps(target_name) { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) - if (!defined(deps)) { - deps = [] - } # https://crbug.com/1098752 Fix for bot OOM (https://crbug.com/1098333). if (defined(java_cmd_pool_size)) { @@ -992,22 +1026,23 @@ if (enable_java_templates) { # Lint requires generated sources and generated resources from the build. # Turbine __header targets depend on all generated sources, and the # __assetres targets depend on all generated resources. 
+ deps = [] if (defined(invoker.deps)) { - foreach(_dep, invoker.deps) { - _target_label = get_label_info(_dep, "label_no_toolchain") - if (filter_exclude([ _target_label ], _java_library_patterns) == [] && - filter_exclude([ _target_label ], _java_resource_patterns) != - []) { - deps += [ - "${_target_label}__assetres", - "${_target_label}__header", - ] - } else { - # Keep non-java deps as they may generate files used only by lint. - # e.g. generated suppressions.xml files. - deps += [ _dep ] - } + _lib_deps = + filter_exclude(filter_include(invoker.deps, java_library_patterns), + java_resource_patterns) + foreach(_lib_dep, _lib_deps) { + # Expand //foo/java -> //foo/java:java + _lib_dep = get_label_info(_lib_dep, "label_no_toolchain") + deps += [ + "${_lib_dep}__assetres", + "${_lib_dep}__header", + ] } + + # Keep non-java deps as they may generate files used only by lint. + # e.g. generated suppressions.xml files. + deps += filter_exclude(invoker.deps, _lib_deps) } if (defined(invoker.min_sdk_version)) { @@ -1016,7 +1051,12 @@ if (enable_java_templates) { _min_sdk_version = default_min_sdk_version } - _lint_binary_path = "$lint_android_sdk_root/cmdline-tools/latest/bin/lint" + if (defined(invoker.lint_jar_path)) { + _lint_jar_path = invoker.lint_jar_path + } else { + _lint_jar_path = _default_lint_jar_path + } + _cache_dir = "$root_build_dir/android_lint_cache" # Save generated xml files in a consistent location for debugging. @@ -1026,17 +1066,20 @@ if (enable_java_templates) { script = "//build/android/gyp/lint.py" depfile = "$target_gen_dir/$target_name.d" inputs = [ - _lint_binary_path, + _lint_jar_path, + _custom_lint_jar_path, _backported_methods, ] args = [ "--target-name", - get_label_info(target_name, "label_no_toolchain"), + get_label_info(":${target_name}", "label_no_toolchain"), "--depfile", rebase_path(depfile, root_build_dir), - "--lint-binary-path", - rebase_path(_lint_binary_path, root_build_dir), + "--lint-jar-path", + rebase_path(_lint_jar_path, root_build_dir), + "--custom-lint-jar-path", + rebase_path(_custom_lint_jar_path, root_build_dir), "--cache-dir", rebase_path(_cache_dir, root_build_dir), "--lint-gen-dir", @@ -1052,6 +1095,8 @@ if (enable_java_templates) { if (defined(invoker.skip_build_server) && invoker.skip_build_server) { # Nocompile tests need lint to fail through ninja. args += [ "--skip-build-server" ] + } else if (android_static_analysis == "build_server") { + args += [ "--use-build-server" ] } if (defined(invoker.lint_suppressions_file)) { @@ -1113,7 +1158,7 @@ if (enable_java_templates) { # Lint requires all source and all resource files to be passed in the # same invocation for checks like UnusedResources. 
- "--java-sources=@FileArg($_rebased_build_config:deps_info:lint_java_sources)", + "--sources=@FileArg($_rebased_build_config:deps_info:lint_sources)", "--aars=@FileArg($_rebased_build_config:deps_info:lint_aars)", "--srcjars=@FileArg($_rebased_build_config:deps_info:lint_srcjars)", "--resource-sources=@FileArg($_rebased_build_config:deps_info:lint_resource_sources)", @@ -1133,12 +1178,7 @@ if (enable_java_templates) { } template("proguard") { - forward_variables_from(invoker, - TESTONLY_AND_VISIBILITY + [ - "data", - "data_deps", - "public_deps", - ]) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) _script = "//build/android/gyp/proguard.py" _deps = invoker.deps @@ -1155,13 +1195,25 @@ if (enable_java_templates) { _mapping_path = "${invoker.output_path}.mapping" } - _enable_jdk_library_desugaring = enable_jdk_library_desugaring - if (defined(invoker.supports_jdk_library_desugaring) && - !invoker.supports_jdk_library_desugaring) { - _enable_jdk_library_desugaring = false + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + + # This is generally the apk name, and serves to identify the mapping + # file that would be required to deobfuscate a stacktrace. + _mapping_basename = get_path_info(_mapping_path, "name") + _version_code = "@FileArg($_rebased_build_config:deps_info:version_code)" + _package_name = "@FileArg($_rebased_build_config:deps_info:package_name)" + if (defined(invoker.package_name)) { + _package_name = invoker.package_name + } + if (defined(invoker.version_code)) { + _version_code = invoker.version_code } - _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + # The Mapping ID is parsed to when uploading mapping files. + # See: https://crbug.com/1417308 + _source_file_template = + "chromium-$_mapping_basename-$android_channel-$_version_code" + _args = [ "--mapping-output", rebase_path(_mapping_path, root_build_dir), @@ -1171,33 +1223,17 @@ if (enable_java_templates) { "@FileArg($_rebased_build_config:android:sdk_jars)", "--r8-path", rebase_path(_r8_path, root_build_dir), + "--package-name=$_package_name", + "--source-file", + _source_file_template, ] if (treat_warnings_as_errors) { _args += [ "--warnings-as-errors" ] } - if (defined(invoker.desugar_jars_paths)) { - _rebased_desugar_jars_paths = - rebase_path(invoker.desugar_jars_paths, root_build_dir) - args += [ "--classpath=${_rebased_desugar_jars_paths}" ] - } if ((!defined(invoker.proguard_enable_obfuscation) || invoker.proguard_enable_obfuscation) && enable_proguard_obfuscation) { - _proguard_sourcefile_suffix = "" - if (defined(invoker.proguard_sourcefile_suffix)) { - _proguard_sourcefile_suffix = "-${invoker.proguard_sourcefile_suffix}" - } - - # This is generally the apk name, and serves to identify the mapping - # file that would be required to deobfuscate a stacktrace. 
- _mapping_id = get_path_info(_mapping_path, "name") - _args += [ - "--enable-obfuscation", - "--sourcefile", - "chromium-${_mapping_id}${_proguard_sourcefile_suffix}", - ] - } else if (defined(invoker.proguard_sourcefile_suffix)) { - not_needed(invoker, [ "proguard_sourcefile_suffix" ]) + _args += [ "--enable-obfuscation" ] } if (defined(invoker.modules)) { @@ -1207,8 +1243,17 @@ if (enable_java_templates) { _args += [ "--feature-name=${_feature_module.name}", "--dex-dest=@FileArg($_rebased_module_build_config:final_dex:path)", - "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:device_classpath)", ] + + # The bundle's build config has the correct classpaths - the individual + # modules' build configs may double-use some jars. + if (defined(invoker.add_view_trace_events) && + invoker.add_view_trace_events) { + _args += [ "--feature-jars=@FileArg($_rebased_build_config:modules:${_feature_module.name}:trace_event_rewritten_device_classpath)" ] + } else { + _args += [ "--feature-jars=@FileArg($_rebased_build_config:modules:${_feature_module.name}:device_classpath)" ] + } + if (defined(_feature_module.uses_split)) { _args += [ "--uses-split=${_feature_module.name}:${_feature_module.uses_split}" ] } @@ -1232,55 +1277,24 @@ if (enable_java_templates) { } _outputs += [ _mapping_path ] - if (defined(invoker.disable_r8_outlining) && invoker.disable_r8_outlining) { - _args += [ "--disable-outlining" ] - } - if (defined(invoker.enable_proguard_checks) && !invoker.enable_proguard_checks) { _args += [ "--disable-checks" ] } - if (defined(invoker.is_static_library) && invoker.is_static_library) { - _args += [ - "--extra-mapping-output-paths", - "@FileArg($_rebased_build_config:deps_info:static_library_proguard_mapping_output_paths)", - ] - } - - if (_enable_jdk_library_desugaring) { - _args += [ - "--desugar-jdk-libs-json", - rebase_path(_desugar_jdk_libs_json, root_build_dir), - ] - _inputs += [ _desugar_jdk_libs_json ] - - _args += [ - "--desugar-jdk-libs-jar", - rebase_path(_desugar_jdk_libs_jar, root_build_dir), - "--desugar-jdk-libs-configuration-jar", - rebase_path(_desugar_jdk_libs_configuration_jar, root_build_dir), - ] - _inputs += [ - _desugar_jdk_libs_jar, - _desugar_jdk_libs_configuration_jar, - ] - - _desugared_library_keep_rule_output_path = - "$target_gen_dir/$target_name.desugared_library_keep_rules.flags" - _args += [ - "--desugared-library-keep-rule-output", - rebase_path(_desugared_library_keep_rule_output_path, root_build_dir), - ] - } _ignore_desugar_missing_deps = defined(invoker.ignore_desugar_missing_deps) && invoker.ignore_desugar_missing_deps - if (!_ignore_desugar_missing_deps && !enable_bazel_desugar) { + if (!_ignore_desugar_missing_deps) { _args += [ "--show-desugar-default-interface-warnings" ] } - if (enable_java_asserts) { + if (defined(invoker.custom_assertion_handler)) { + _args += [ + "--assertion-handler", + invoker.custom_assertion_handler, + ] + } else if (enable_java_asserts) { # The default for generating dex file format is # --force-disable-assertions. _args += [ "--force-enable-assertions" ] @@ -1337,6 +1351,12 @@ if (enable_java_templates) { _deps += [ ":$_expectations_target" ] } action_with_pydeps(target_name) { + forward_variables_from(invoker, + [ + "data", + "data_deps", + "public_deps", + ]) script = _script deps = _deps inputs = _inputs @@ -1360,7 +1380,7 @@ if (enable_java_templates) { # # Variables # main_class: The class containing the program entry point. - # build_config: Path to .build_config for the jar (contains classpath). 
+ # build_config: Path to .build_config.json for the jar (contains classpath). # script_name: Name of the script to generate. # wrapper_script_args: List of extra arguments to pass to the executable. # tiered_stop_at_level_one: Whether to pass --tiered-stop-at-level-one @@ -1372,6 +1392,11 @@ if (enable_java_templates) { _main_class = invoker.main_class _build_config = invoker.build_config _script_name = invoker.script_name + if (defined(invoker.max_heap_size)) { + _max_heap_size = invoker.max_heap_size + } else { + _max_heap_size = "1G" + } script = "//build/android/gyp/create_java_binary_script.py" inputs = [ _build_config ] @@ -1383,28 +1408,37 @@ if (enable_java_templates) { rebase_path(_java_script, root_build_dir), "--main-class", _main_class, - ] - args += [ "--classpath=@FileArg($_rebased_build_config:deps_info:host_classpath)", + "--max-heap-size=$_max_heap_size", ] + data = [] if (use_jacoco_coverage) { args += [ "--classpath", - rebase_path("//third_party/jacoco/lib/jacocoagent.jar", - root_build_dir), + rebase_path(_jacoco_host_jar, root_build_dir), ] - } - if (use_jacoco_coverage || !treat_warnings_as_errors) { - args += [ "--noverify" ] + data += [ _jacoco_host_jar ] } if (defined(invoker.tiered_stop_at_level_one) && invoker.tiered_stop_at_level_one) { args += [ "--tiered-stop-at-level-one" ] } + if (defined(invoker.extra_classpath_jars)) { + _rebased_extra_classpath_jars = + rebase_path(invoker.extra_classpath_jars, root_build_dir) + args += [ "--classpath=${_rebased_extra_classpath_jars}" ] + data += invoker.extra_classpath_jars + } if (defined(invoker.wrapper_script_args)) { args += [ "--" ] + invoker.wrapper_script_args } + if (defined(invoker.use_jdk_11) && invoker.use_jdk_11) { + args += [ "--use-jdk-11" ] + deps += [ "//third_party/jdk11:java_data" ] + } else { + deps += [ "//third_party/jdk:java_data" ] + } } } @@ -1424,7 +1458,7 @@ if (enable_java_templates) { !defined(invoker.enable_multidex) || invoker.enable_multidex _enable_main_dex_list = _enable_multidex && _min_sdk_version < 21 _enable_desugar = !defined(invoker.enable_desugar) || invoker.enable_desugar - _desugar_needs_classpath = _enable_desugar && !enable_bazel_desugar + _desugar_needs_classpath = _enable_desugar # It's not safe to dex merge with libraries dex'ed at higher api versions. 
assert(!_is_dex_merging || _min_sdk_version >= default_min_sdk_version) @@ -1448,6 +1482,10 @@ if (enable_java_templates) { assert(!(defined(invoker.apply_mapping) && !_proguard_enabled), "apply_mapping can only be specified if proguard is enabled.") + if (defined(invoker.custom_assertion_handler)) { + assert(_proguard_enabled, + "Proguard is required to support the custom assertion handler.") + } if (_enable_main_dex_list) { _main_dex_rules = "//build/android/main_dex_classes.flags" @@ -1463,23 +1501,23 @@ if (enable_java_templates) { proguard(_proguard_target_name) { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ + "add_view_trace_events", "build_config", + "custom_assertion_handler", "data", "data_deps", "deps", - "desugar_jars_paths", - "disable_r8_outlining", "enable_proguard_checks", "expected_proguard_config", "expected_proguard_config_base", "ignore_desugar_missing_deps", - "is_static_library", "modules", + "package_name", "proguard_enable_obfuscation", "proguard_mapping_path", "proguard_sourcefile_suffix", - "supports_jdk_library_desugaring", "top_target_name", + "version_code", ]) inputs = [] if (defined(invoker.inputs)) { @@ -1495,17 +1533,12 @@ if (enable_java_templates) { ] if (defined(invoker.has_apk_under_test) && invoker.has_apk_under_test) { args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:device_classpath_extended)" ] + } else if (defined(invoker.add_view_trace_events) && + invoker.add_view_trace_events && defined(invoker.modules)) { + args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:trace_event_rewritten_device_classpath)" ] } else { args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:device_classpath)" ] } - if (enable_bazel_desugar) { - deps += [ "//third_party/bazel/desugar:desugar_runtime_java" ] - inputs += [ _desugar_runtime_jar ] - args += [ - "--input-paths", - rebase_path(_desugar_runtime_jar, root_build_dir), - ] - } if (defined(invoker.proguard_args)) { args += invoker.proguard_args } @@ -1540,18 +1573,14 @@ if (enable_java_templates) { } } else { # !_proguard_enabled _is_library = defined(invoker.is_library) && invoker.is_library + assert(!(defined(invoker.input_classes_filearg) && _is_library)) + assert(_is_library == defined(invoker.unprocessed_jar_path)) _input_class_jars = [] if (defined(invoker.input_class_jars)) { _input_class_jars = invoker.input_class_jars } _deps = invoker.deps - if (!_is_library && enable_bazel_desugar) { - # It would be more efficient to use the pre-dex'ed copy of the runtime, - # but it's easier to add it in this way. - _deps += [ "//third_party/bazel/desugar:desugar_runtime_java" ] - _input_class_jars += [ _desugar_runtime_jar ] - } if (_input_class_jars != []) { _rebased_input_class_jars = rebase_path(_input_class_jars, root_build_dir) @@ -1568,9 +1597,12 @@ if (enable_java_templates) { depfile = "$target_gen_dir/$target_name.d" outputs = [ invoker.output ] inputs = [ - _r8_path, + _d8_path, _custom_d8_path, ] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + } if (!_is_library) { # http://crbug.com/725224. Fix for bots running out of memory. 
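The dex() template here is internal plumbing that higher-level rules invoke; schematically, a non-ProGuard invocation wires up like this (a sketch only, with invented target and paths; real callers pass several more fields):

    dex("foo_java__dex") {
      output = "$target_out_dir/foo_java.dex.jar"
      build_config = "$target_gen_dir/foo_java.build_config.json"
      min_sdk_version = default_min_sdk_version
      input_class_jars = [ "$target_out_dir/foo_java.processed.jar" ]
      deps = [ ":foo_java" ]  # Whatever produces the input .jar.
    }
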
@@ -1588,7 +1620,7 @@ if (enable_java_templates) { rebase_path(outputs[0], root_build_dir), "--min-api=$_min_sdk_version", "--r8-jar-path", - rebase_path(_r8_path, root_build_dir), + rebase_path(_d8_path, root_build_dir), "--custom-d8-jar-path", rebase_path(_custom_d8_path, root_build_dir), @@ -1636,32 +1668,16 @@ if (enable_java_templates) { if (defined(invoker.input_classes_filearg)) { inputs += [ invoker.build_config ] args += [ "--class-inputs-filearg=${invoker.input_classes_filearg}" ] + + # Required for the same reason as unprocessed_jar_path is added to + # classpath (see note below). + args += [ "--classpath=${invoker.input_classes_filearg}" ] } if (_input_class_jars != []) { inputs += _input_class_jars args += [ "--class-inputs=${_rebased_input_class_jars}" ] } - if (defined(invoker.dexlayout_profile)) { - args += [ - "--dexlayout-profile", - rebase_path(invoker.dexlayout_profile, root_build_dir), - "--dexlayout-path", - rebase_path(_dexlayout_path, root_build_dir), - "--profman-path", - rebase_path(_profman_path, root_build_dir), - "--dexdump-path", - rebase_path(_dexdump_path, root_build_dir), - ] - inputs += [ - _dexlayout_path, - _profman_path, - _dexdump_path, - invoker.dexlayout_profile, - ] - inputs += _default_art_libs - } - # Never compile intermediates with --release in order to: # 1) not require recompiles when toggling is_java_debug, # 2) allow incremental_install=1 to still have local variable @@ -1673,52 +1689,46 @@ if (enable_java_templates) { if (_enable_desugar) { args += [ "--desugar" ] - # Passing the flag for dex merging causes invalid dex files to be created. - if (enable_jdk_library_desugaring && !_is_dex_merging) { - inputs += [ _desugar_jdk_libs_json ] - args += [ - "--desugar-jdk-libs-json", - rebase_path(_desugar_jdk_libs_json, root_build_dir), - ] - } _ignore_desugar_missing_deps = defined(invoker.ignore_desugar_missing_deps) && invoker.ignore_desugar_missing_deps - if (!_ignore_desugar_missing_deps && !enable_bazel_desugar) { + if (!_ignore_desugar_missing_deps) { args += [ "--show-desugar-default-interface-warnings" ] } } if (_desugar_needs_classpath) { + # Cannot use header jar for the active jar, because it does not + # contain anonymous classes. https://crbug.com/1342018#c5 + # Cannot use processed .jar here because it might have classes + # filtered out via jar_excluded_patterns. + # Must come first in classpath in order to take precedence over + # deps that define the same classes (via jar_excluded_patterns). + if (defined(invoker.unprocessed_jar_path)) { + args += [ + "--classpath", + rebase_path(invoker.unprocessed_jar_path, root_build_dir), + + # Pass the full classpath to find new dependencies that are not in + # the .desugardeps file. + "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)", + ] + inputs += [ invoker.unprocessed_jar_path ] + } _desugar_dependencies_path = "$target_gen_dir/$target_name.desugardeps" args += [ "--desugar-dependencies", rebase_path(_desugar_dependencies_path, root_build_dir), "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_jars)", - - # Pass the full classpath to find new dependencies that are not in # the .desugardeps file.
- "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)", ] - if (defined(invoker.desugar_jars_paths)) { - _rebased_desugar_jars_paths = - rebase_path(invoker.desugar_jars_paths, root_build_dir) - args += [ "--classpath=${_rebased_desugar_jars_paths}" ] - } - if (defined(invoker.final_ijar_path)) { - # Need to include the input .interface.jar on the classpath in order to make - # jar_excluded_patterns classes visible to desugar. - args += [ - "--classpath", - rebase_path(invoker.final_ijar_path, root_build_dir), - ] - inputs += [ invoker.final_ijar_path ] - } - } else { - not_needed(invoker, [ "desugar_jars_paths" ]) } - if (enable_java_asserts) { + if (defined(invoker.custom_assertion_handler)) { + args += [ + "--assertion-handler", + invoker.custom_assertion_handler, + ] + } else if (enable_java_asserts) { # The default for generating dex file format is # --force-disable-assertions. args += [ "--force-enable-assertions" ] @@ -1727,38 +1737,6 @@ if (enable_java_templates) { } } - # Variables - # output: Path to output ".l8.dex". - # min_sdk_version: The minimum Android SDK version this target supports. - template("dex_jdk_libs") { - action_with_pydeps(target_name) { - script = "//build/android/gyp/dex_jdk_libs.py" - inputs = [ - _r8_path, - _desugar_jdk_libs_json, - _desugar_jdk_libs_jar, - _desugar_jdk_libs_configuration_jar, - ] - outputs = [ invoker.output ] - args = [ - "--r8-path", - rebase_path(_r8_path, root_build_dir), - "--desugar-jdk-libs-json", - rebase_path(_desugar_jdk_libs_json, root_build_dir), - "--desugar-jdk-libs-jar", - rebase_path(_desugar_jdk_libs_jar, root_build_dir), - "--desugar-jdk-libs-configuration-jar", - rebase_path(_desugar_jdk_libs_configuration_jar, root_build_dir), - "--output", - rebase_path(invoker.output, root_build_dir), - "--min-api=${invoker.min_sdk_version}", - ] - if (treat_warnings_as_errors) { - args += [ "--warnings-as-errors" ] - } - } - } - template("jacoco_instr") { action_with_pydeps(target_name) { forward_variables_from(invoker, @@ -1773,7 +1751,7 @@ if (enable_java_templates) { _jacococli_jar = "//third_party/jacoco/lib/jacococli.jar" script = "//build/android/gyp/jacoco_instr.py" - inputs = invoker.java_files + [ + inputs = invoker.source_files + [ _jacococli_jar, invoker.input_jar_path, ] @@ -1788,8 +1766,8 @@ if (enable_java_templates) { rebase_path(invoker.output_jar_path, root_build_dir), "--sources-json-file", rebase_path(_sources_json_file, root_build_dir), - "--java-sources-file", - rebase_path(invoker.java_sources_file, root_build_dir), + "--target-sources-file", + rebase_path(invoker.target_sources_file, root_build_dir), "--jacococli-jar", rebase_path(_jacococli_jar, root_build_dir), ] @@ -1805,7 +1783,12 @@ if (enable_java_templates) { template("filter_jar") { action_with_pydeps(target_name) { script = "//build/android/gyp/filter_zip.py" - forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "deps", + "data", + "data_deps", + ]) inputs = [ invoker.input_jar ] if (defined(invoker.inputs)) { inputs += invoker.inputs @@ -1820,8 +1803,6 @@ if (enable_java_templates) { if (defined(invoker.jar_included_patterns)) { _jar_included_patterns = invoker.jar_included_patterns } - _strip_resource_classes = defined(invoker.strip_resource_classes) && - invoker.strip_resource_classes args = [ "--input", rebase_path(invoker.input_jar, root_build_dir), @@ -1830,176 +1811,59 @@ if (enable_java_templates) { 
"--exclude-globs=${_jar_excluded_patterns}", "--include-globs=${_jar_included_patterns}", ] - if (_strip_resource_classes) { - inputs += [ invoker.build_config ] - _rebased_build_config = - rebase_path(invoker.build_config, root_build_dir) - args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ] - } } } - template("process_java_prebuilt") { + template("process_java_library") { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) - _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) - not_needed([ "_rebased_build_config" ]) - not_needed(invoker, [ "build_config_dep" ]) - - _deps = invoker.jar_deps _previous_output_jar = invoker.input_jar_path - # Create the .jar in lib.java for use by java_binary. - if (defined(invoker.host_jar_path)) { - if (defined(invoker.jacoco_instrument) && invoker.jacoco_instrument) { - _filter_jar_target_name = "${target_name}_host__filter_jar" - _filter_jar_output_jar = "$target_out_dir/$target_name.host_filter.jar" - } else { - _filter_jar_target_name = "${target_name}_host" - _filter_jar_output_jar = invoker.host_jar_path - } - filter_jar(_filter_jar_target_name) { - forward_variables_from(invoker, - [ - "jar_excluded_patterns", - "jar_included_patterns", - "strip_resource_classes", - ]) - deps = _deps - input_jar = _previous_output_jar - output_jar = _filter_jar_output_jar - inputs = [] - if (defined(strip_resource_classes) && strip_resource_classes) { - inputs += [ invoker.build_config ] - deps += [ invoker.build_config_dep ] - args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ] - } - if (defined(invoker.inputs)) { - inputs += invoker.inputs - deps += invoker.input_deps - } - } - - if (defined(invoker.jacoco_instrument) && invoker.jacoco_instrument) { - # Jacoco must run after desugar (or else desugar sometimes fails). - # It must run after filtering to avoid the same (filtered) class mapping - # to multiple .jar files. - # We run offline code coverage processing here rather than with a - # javaagent as the desired coverage data was not being generated. - # See crbug.com/1097815. 
- jacoco_instr("${target_name}_host") { - deps = [ ":$_filter_jar_target_name" ] + invoker.jar_deps - forward_variables_from(invoker, - [ - "java_files", - "java_sources_file", - ]) - - input_jar_path = _filter_jar_output_jar - output_jar_path = invoker.host_jar_path - } - } + if (invoker.jacoco_instrument) { + _filter_jar_target_name = "${target_name}__filter_jar" + _filter_jar_output_jar = "$target_out_dir/$target_name.filter.jar" + } else { + _filter_jar_target_name = target_name + _filter_jar_output_jar = invoker.output_jar_path } - if (defined(invoker.device_jar_path)) { - if (invoker.enable_desugar) { - _desugar_target = "${target_name}_device__desugar" - _desugar_output_jar = "$target_out_dir/$target_name.desugar.jar" - - action_with_pydeps(_desugar_target) { - script = "//build/android/gyp/desugar.py" - deps = _deps + invoker.classpath_deps - depfile = "$target_gen_dir/$target_name.d" - _desugar_jar = "//third_party/bazel/desugar/Desugar.jar" - - inputs = [ - invoker.build_config, - _previous_output_jar, - _desugar_jar, - ] - outputs = [ _desugar_output_jar ] - args = [ - "--desugar-jar", - rebase_path(_desugar_jar, root_build_dir), - "--input-jar", - rebase_path(_previous_output_jar, root_build_dir), - "--output-jar", - rebase_path(_desugar_output_jar, root_build_dir), - - # Temporarily using java_full_interface_classpath until classpath validation of targets - # is implemented, see http://crbug.com/885273 - "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)", - "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_interface_jars)", - "--depfile", - rebase_path(depfile, root_build_dir), - ] - if (defined(invoker.desugar_jars_paths)) { - _rebased_desugar_jars_paths = - rebase_path(invoker.desugar_jars_paths, root_build_dir) - args += [ "--classpath=${_rebased_desugar_jars_paths}" ] - } - if (treat_warnings_as_errors) { - args += [ "--warnings-as-errors" ] - } - } - - _deps = [] - _deps = [ ":$_desugar_target" ] - _previous_output_jar = _desugar_output_jar - } - - if (invoker.jacoco_instrument) { - _filter_jar_target_name = "${target_name}_device__filter_jar" - _filter_jar_output_jar = - "$target_out_dir/$target_name.device_filter.jar" - } else { - _filter_jar_target_name = "${target_name}_device" - _filter_jar_output_jar = invoker.device_jar_path - } - filter_jar(_filter_jar_target_name) { + filter_jar(_filter_jar_target_name) { + forward_variables_from(invoker, + [ + "data", + "data_deps", + "jar_excluded_patterns", + "jar_included_patterns", + ]) + deps = invoker.deps + input_jar = _previous_output_jar + output_jar = _filter_jar_output_jar + } + + if (invoker.jacoco_instrument) { + # Jacoco must run after desugar (or else desugar sometimes fails). + # It must run after filtering to avoid the same (filtered) class mapping + # to multiple .jar files. + # We run offline code coverage processing here rather than with a + # javaagent as the desired coverage data was not being generated. + # See crbug.com/1097815. 
+ jacoco_instr(target_name) { + deps = [ ":$_filter_jar_target_name" ] + invoker.deps forward_variables_from(invoker, [ - "jar_excluded_patterns", - "jar_included_patterns", - "strip_resource_classes", + "source_files", + "target_sources_file", ]) - deps = _deps - input_jar = _previous_output_jar - output_jar = _filter_jar_output_jar - inputs = [] - if (defined(strip_resource_classes) && strip_resource_classes) { - inputs += [ invoker.build_config ] - deps += [ invoker.build_config_dep ] - args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ] - } - if (!defined(invoker.host_jar_path) && defined(invoker.inputs)) { - inputs += invoker.inputs - deps += invoker.input_deps - } - } - - if (invoker.jacoco_instrument) { - # Jacoco must run after desugar (or else desugar sometimes fails). - # It must run after filtering to avoid the same (filtered) class mapping - # to multiple .jar files. - jacoco_instr("${target_name}_device") { - deps = [ ":$_filter_jar_target_name" ] + invoker.jar_deps - forward_variables_from(invoker, - [ - "java_files", - "java_sources_file", - ]) - input_jar_path = _filter_jar_output_jar - output_jar_path = invoker.device_jar_path - } + input_jar_path = _filter_jar_output_jar + output_jar_path = invoker.output_jar_path } } } template("bytecode_processor") { action_with_pydeps(target_name) { - forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "data_deps" ]) _bytecode_checker_script = "$root_build_dir/bin/helper/bytecode_processor" script = "//build/android/gyp/bytecode_processor.py" inputs = [ @@ -2014,7 +1878,7 @@ if (enable_java_templates) { _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) args = [ "--target-name", - get_label_info(target_name, "label_no_toolchain"), + get_label_info(":${target_name}", "label_no_toolchain"), "--script", rebase_path(_bytecode_checker_script, root_build_dir), "--gn-target=${invoker.target_label}", @@ -2026,7 +1890,10 @@ if (enable_java_templates) { "--full-classpath-jars=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)", "--full-classpath-gn-targets=@FileArg($_rebased_build_config:deps_info:javac_full_classpath_targets)", ] - if (invoker.requires_android) { + if (android_static_analysis == "build_server") { + args += [ "--use-build-server" ] + } + if (invoker.include_android_sdk) { args += [ "--sdk-classpath-jars=@FileArg($_rebased_build_config:android:sdk_jars)" ] } if (invoker.is_prebuilt) { @@ -2052,6 +1919,7 @@ if (enable_java_templates) { inputs = [ invoker.build_config, invoker.input_manifest, + _manifest_merger_jar_path, ] outputs = [ invoker.output_manifest ] @@ -2060,9 +1928,8 @@ if (enable_java_templates) { args = [ "--depfile", rebase_path(depfile, root_build_dir), - "--android-sdk-cmdline-tools", - rebase_path("${public_android_sdk_root}/cmdline-tools/latest", - root_build_dir), + "--manifest-merger-jar", + rebase_path(_manifest_merger_jar_path, root_build_dir), "--root-manifest", rebase_path(invoker.input_manifest, root_build_dir), "--output", @@ -2093,7 +1960,7 @@ if (enable_java_templates) { # Input variables: # deps: Specifies the input dependencies for this target. # - # build_config: Path to the .build_config file corresponding to the target. + # build_config: Path to the .build_config.json file corresponding to the target. # # sources: # List of input resource files. 
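Several hunks above route classpaths and jar lists through the @FileArg() form rather than GN-time file reads. A hedged illustration of the mechanism (the key path is taken from the hunks above; the expansion helper is assumed to be ExpandFileArgs in build/android/gyp/util/build_utils.py in this tree):

  # GN emits this literal string; no file is opened at GN time.
  args = [ "--classpath=@FileArg($_rebased_build_config:deps_info:host_classpath)" ]

At action time the wrapped python script expands it, conceptually reading json["deps_info"]["host_classpath"] from the named .build_config.json, which is why that file must also be listed in the action's inputs.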
@@ -2123,6 +1990,7 @@ forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps", + "public_deps", "sources", ]) script = "//build/android/gyp/prepare_resources.py" @@ -2161,11 +2029,15 @@ if (defined(invoker.strip_drawables) && invoker.strip_drawables) { args += [ "--strip-drawables" ] } + if (defined(invoker.allow_missing_resources) && + invoker.allow_missing_resources) { + args += [ "--allow-missing-resources" ] + } } } # A template that is used to compile all resources needed by a binary - # (e.g. an android_apk or a junit_binary) into an intermediate .ar_ + # (e.g. an android_apk or a robolectric_binary) into an intermediate .ap_ # archive. It can also generate an associated .srcjar that contains the # final R.java sources for all resource packages the binary depends on. # @@ -2174,9 +2046,9 @@ # # deps: Specifies the input dependencies for this target. # - # build_config: Path to the .build_config file corresponding to the target. + # build_config: Path to the .build_config.json file corresponding to the target. # - # build_config_dep: Dep target to generate the .build_config file. + # build_config_dep: Dep target to generate the .build_config.json file. # # android_manifest: Path to root manifest for the binary. # @@ -2210,10 +2082,6 @@ # resources to put in the final output, even if aapt_locale_allowlist # is defined to a smaller subset. # - # support_zh_hk: (optional) - # If true, support zh-HK in Chrome on Android by using the resources - # from zh-TW. See https://crbug.com/780847. - # # aapt_locale_allowlist: (optional) # Restrict compiled locale-dependent resources to a specific allowlist. # NOTE: This is a list of Chromium locale names, not Android ones. # @@ -2229,8 +2097,6 @@ # # resource_values_filter_rules: (optional) # - # no_xml_namespaces: (optional) - # # png_to_webp: (optional) # If true, convert all PNG resources (except 9-patch files) to WebP. # @@ -2251,29 +2117,15 @@ # Use resource IDs provided by another APK target when compiling resources # (via "aapt2 link --stable-ids") # - # short_resource_paths: (optional) - # Rename the paths within a the apk to be randomly generated short - # strings to reduce binary size. - # - # strip_resource_names: (optional) - # Strip resource names from the resources table of the apk. # # Output variables: # arsc_output: Path to output .ap_ file (optional). # # proto_output: Path to output .proto.ap_ file (optional). # - # optimized_arsc_output: Path to optimized .ap_ file (optional). - # - # optimized_proto_output: Path to optimized .proto.ap_ file (optional). - # # r_text_out_path: (optional): # Path for the corresponding generated R.txt file. # - # resources_path_map_out_path: (optional): - # Path for the generated map between original resource paths and - # shortend resource paths. - # # proguard_file: (optional) # Path to proguard configuration file for this apk target.
# @@ -2282,30 +2134,17 @@ if (enable_java_templates) { template("compile_resources") { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) - _deps = [ - invoker.android_sdk_dep, - invoker.build_config_dep, - ] + _deps = invoker.deps + [ + invoker.android_sdk_dep, + invoker.build_config_dep, + ] if (defined(invoker.android_manifest_dep)) { _deps += [ invoker.android_manifest_dep ] } - foreach(_dep, invoker.deps) { - _target_label = get_label_info(_dep, "label_no_toolchain") - if (filter_exclude([ _target_label ], _java_library_patterns) == [] && - filter_exclude([ _target_label ], _java_resource_patterns) != []) { - # Depend on the java libraries' transitive __assetres target instead. - _deps += [ "${_target_label}__assetres" ] - } else { - _deps += [ _dep ] - } - } if (defined(invoker.arsc_output)) { _arsc_output = invoker.arsc_output } - if (defined(invoker.optimized_arsc_output)) { - _optimized_arsc_output = invoker.optimized_arsc_output - } _final_srcjar_path = "${target_gen_dir}/${target_name}.srcjar" _script = "//build/android/gyp/compile_resources.py" @@ -2323,7 +2162,6 @@ if (enable_java_templates) { rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir), "--dependencies-res-zips=@FileArg($_rebased_build_config:deps_info:dependency_zips)", "--extra-res-packages=@FileArg($_rebased_build_config:deps_info:extra_package_names)", - "--extra-main-r-text-files=@FileArg($_rebased_build_config:deps_info:extra_main_r_text_files)", "--min-sdk-version=${invoker.min_sdk_version}", "--target-sdk-version=${invoker.target_sdk_version}", "--webp-cache-dir=obj/android-webp-cache", @@ -2337,9 +2175,6 @@ if (enable_java_templates) { "--srcjar-out", rebase_path(_final_srcjar_path, root_build_dir), ] - if (defined(invoker.no_xml_namespaces) && invoker.no_xml_namespaces) { - _args += [ "--no-xml-namespaces" ] - } if (defined(invoker.version_code)) { _args += [ "--version-code", @@ -2373,36 +2208,6 @@ if (enable_java_templates) { rebase_path(invoker.size_info_path, root_build_dir), ] } - if (defined(_optimized_arsc_output)) { - _outputs += [ _optimized_arsc_output ] - _args += [ - "--optimized-arsc-path", - rebase_path(_optimized_arsc_output, root_build_dir), - ] - } - if (defined(invoker.optimized_proto_output)) { - _outputs += [ invoker.optimized_proto_output ] - _args += [ - "--optimized-proto-path", - rebase_path(invoker.optimized_proto_output, root_build_dir), - ] - } - if (defined(invoker.resources_config_paths)) { - _inputs += invoker.resources_config_paths - _rebased_resource_configs = - rebase_path(invoker.resources_config_paths, root_build_dir) - _args += [ "--resources-config-paths=${_rebased_resource_configs}" ] - } - if (defined(invoker.short_resource_paths) && invoker.short_resource_paths) { - _args += [ "--short-resource-paths" ] - if (defined(invoker.resources_path_map_out_path)) { - _outputs += [ invoker.resources_path_map_out_path ] - _args += [ - "--resources-path-map-out-path", - rebase_path(invoker.resources_path_map_out_path, root_build_dir), - ] - } - } if (defined(invoker.r_java_root_package_name)) { _args += [ @@ -2411,10 +2216,6 @@ if (enable_java_templates) { ] } - if (defined(invoker.strip_resource_names) && invoker.strip_resource_names) { - _args += [ "--strip-resource-names" ] - } - # Useful to have android:debuggable in the manifest even for Release # builds. 
Just omit it for official builds. if (debuggable_apks) { @@ -2526,11 +2327,8 @@ if (enable_java_templates) { [ "--values-filter-rules=${invoker.resource_values_filter_rules}" ] } - if (defined(invoker.support_zh_hk) && invoker.support_zh_hk) { - _args += [ "--support-zh-hk" ] - } - if (defined(invoker.include_resource)) { + _inputs += [ invoker.include_resource ] _rebased_include_resources = rebase_path(invoker.include_resource, root_build_dir) _args += [ "--include-resources=$_rebased_include_resources" ] @@ -2615,18 +2413,20 @@ if (enable_java_templates) { ] inputs += [ invoker.expected_android_manifest_base ] } - if (fail_on_android_expectations) { - args += [ "--fail-on-expectations" ] + if (defined(invoker.expected_android_manifest_version_code_offset)) { + args += [ + "--verification-version-code-offset", + invoker.expected_android_manifest_version_code_offset, + ] } - if (defined(invoker.extra_verification_manifest)) { - inputs += [ invoker.extra_verification_manifest ] + if (defined(invoker.expected_android_manifest_library_version_offset)) { args += [ - "--extra-verification-manifest", - rebase_path(invoker.extra_verification_manifest, root_build_dir), + "--verification-library-version-offset", + invoker.expected_android_manifest_library_version_offset, ] - if (defined(invoker.extra_verification_manifest_dep)) { - deps += [ invoker.extra_verification_manifest_dep ] - } + } + if (fail_on_android_expectations) { + args += [ "--fail-on-expectations" ] } } _deps += [ ":$_expectations_target" ] @@ -2645,35 +2445,122 @@ if (enable_java_templates) { } } + # A template that is used to optimize compiled resources using aapt2 optimize. + # + # proto_input_path: + # Path to input compiled .proto.ap_ file. + # + # short_resource_paths: (optional) + # Rename the paths within the apk to be randomly generated short + # strings to reduce binary size. + # + # strip_resource_names: (optional) + # Strip resource names from the resources table of the apk. + # + # resources_config_paths: (optional) + # List of resource configs to use for optimization. + # + # optimized_proto_output: + # Path to output optimized .proto.ap_ file. + # + # resources_path_map_out_path: (optional): + # Path for the generated map between original resource paths and + # shortened resource paths.
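+ #
+ # Hedged usage sketch (the target and file names below are hypothetical;
+ # the variables mirror the args wired up in the template body):
+ #
+ #   optimize_resources("foo__optimize_resources") {
+ #     deps = [ ":foo__compile_resources" ]
+ #     proto_input_path = "$target_out_dir/foo.proto.ap_"
+ #     r_text_path = "$target_out_dir/foo_R.txt"
+ #     optimized_proto_output = "$target_out_dir/foo.optimized.proto.ap_"
+ #     short_resource_paths = true
+ #     resources_path_map_out_path = "$target_out_dir/foo.pathmap.txt"
+ #   }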
+ template("optimize_resources") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + action_with_pydeps(target_name) { + forward_variables_from(invoker, [ "deps" ]) + script = "//build/android/gyp/optimize_resources.py" + outputs = [ invoker.optimized_proto_output ] + inputs = [ + android_sdk_tools_bundle_aapt2, + invoker.r_text_path, + invoker.proto_input_path, + ] + args = [ + "--aapt2-path", + rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir), + "--r-text-in", + rebase_path(invoker.r_text_path, root_build_dir), + "--proto-path", + rebase_path(invoker.proto_input_path, root_build_dir), + "--optimized-proto-path", + rebase_path(invoker.optimized_proto_output, root_build_dir), + ] + + if (defined(invoker.resources_config_paths)) { + inputs += invoker.resources_config_paths + _rebased_resource_configs = + rebase_path(invoker.resources_config_paths, root_build_dir) + args += [ "--resources-config-paths=${_rebased_resource_configs}" ] + } + + if (defined(invoker.short_resource_paths) && + invoker.short_resource_paths) { + args += [ "--short-resource-paths" ] + if (defined(invoker.resources_path_map_out_path)) { + outputs += [ invoker.resources_path_map_out_path ] + args += [ + "--resources-path-map-out-path", + rebase_path(invoker.resources_path_map_out_path, root_build_dir), + ] + } + } + + if (defined(invoker.strip_resource_names) && + invoker.strip_resource_names) { + args += [ "--strip-resource-names" ] + } + } + } + + # A template that is used to find unused resources. template("unused_resources") { - _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) - _shrinker_dep = "//build/android/gyp/resources_shrinker:resources_shrinker" - _shrinker_script = "$root_build_dir/bin/helper/resources_shrinker" action_with_pydeps(target_name) { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) - script = "//build/android/gyp/resources_shrinker/shrinker.py" - inputs = [ - invoker.build_config, - invoker.proguard_mapping_path, - _shrinker_script, + script = "//build/android/gyp/unused_resources.py" + depfile = "$target_gen_dir/${target_name}.d" + _unused_resources_script = "$root_build_dir/bin/helper/unused_resources" + inputs = [ _unused_resources_script ] + outputs = [ + invoker.output_config, + invoker.output_r_txt, ] - outputs = [ invoker.output_config ] if (!defined(deps)) { deps = [] } - deps += [ _shrinker_dep ] + deps += [ "//build/android/unused_resources:unused_resources" ] + _rebased_module_build_config = + rebase_path(invoker.build_config, root_build_dir) args = [ "--script", - rebase_path(_shrinker_script, root_build_dir), - "--dependencies-res-zips=@FileArg($_rebased_build_config:deps_info:dependency_zips)", - "--proguard-mapping", - rebase_path(invoker.proguard_mapping_path, root_build_dir), - "--r-text=@FileArg($_rebased_build_config:deps_info:r_text_path)", - "--dex=@FileArg($_rebased_build_config:final_dex:path)", - "--android-manifest=@FileArg($_rebased_build_config:deps_info:android_manifest)", + rebase_path(_unused_resources_script, root_build_dir), "--output-config", rebase_path(invoker.output_config, root_build_dir), + "--r-text-in=@FileArg($_rebased_module_build_config:deps_info:r_text_path)", + "--r-text-out", + rebase_path(invoker.output_r_txt, root_build_dir), + "--dependencies-res-zips=@FileArg($_rebased_module_build_config:deps_info:dependency_zips)", + "--depfile", + rebase_path(depfile, root_build_dir), ] + + if (defined(invoker.proguard_mapping_path)) { + inputs += [ invoker.proguard_mapping_path ] + args += [ 
+ "--proguard-mapping", + rebase_path(invoker.proguard_mapping_path, root_build_dir), + ] + } + + foreach(_build_config, invoker.all_module_build_configs) { + inputs += [ _build_config ] + _rebased_build_config = rebase_path(_build_config, root_build_dir) + args += [ + "--dexes=@FileArg($_rebased_build_config:final_dex:path)", + "--android-manifests=@FileArg($_rebased_build_config:deps_info:merged_android_manifest)", + ] + } } } @@ -2740,12 +2627,49 @@ if (enable_java_templates) { } } + template("create_binary_profile") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, [ "deps" ]) + script = "//build/android/gyp/binary_baseline_profile.py" + depfile = "$target_gen_dir/$target_name.d" + outputs = [ + invoker.binary_baseline_profile_path, + invoker.binary_baseline_profile_metadata_path, + ] + _profgen_path = "$android_sdk_root/cmdline-tools/latest/bin/profgen" + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + inputs = [ + invoker.build_config, + invoker.proguard_mapping_path, + invoker.input_profile_path, + _profgen_path, + ] + args = [ + "--profgen", + rebase_path(_profgen_path, root_build_dir), + "--output-profile", + rebase_path(invoker.binary_baseline_profile_path, root_build_dir), + "--output-metadata", + rebase_path(invoker.binary_baseline_profile_metadata_path, + root_build_dir), + "--dex=@FileArg($_rebased_build_config:final_dex:path)", + "--proguard-mapping", + rebase_path(invoker.proguard_mapping_path, root_build_dir), + "--input-profile-path", + rebase_path(invoker.input_profile_path, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + ] + } + } + # Creates a signed and aligned .apk. # # Variables # apk_name: (optional) APK name (without .apk suffix). If provided, will # be used to generate .info files later used by the supersize tool. - # assets_build_config: Path to android_apk .build_config containing merged + # assets_build_config: Path to android_apk .build_config.json containing merged # asset information. # deps: Specifies the dependencies of this target. # dex_path: Path to classes.dex file to include (optional). @@ -2753,7 +2677,6 @@ if (enable_java_templates) { # and assets is consistent with the given expectation file. # expected_libs_and_assets_base: Treat expected_libs_and_assets as a diff # with this file as the base. - # jdk_libs_dex: Path to classes.dex for desugar_jdk_libs. # packaged_resources_path: Path to .ap_ to use. # output_apk_path: Output path for the generated .apk. # min_sdk_version: The minimum Android SDK version this target supports. @@ -2770,10 +2693,10 @@ if (enable_java_templates) { # keystore_path: Path to keystore to use for signing. # keystore_name: Key alias to use. # keystore_password: Keystore password. - # uncompress_shared_libraries: (optional, default false) Whether to store - # native libraries inside the APK uncompressed and page-aligned. 
template("package_apk") { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "public_deps" ]) + _is_robolectric_apk = + defined(invoker.is_robolectric_apk) && invoker.is_robolectric_apk _deps = invoker.deps _native_lib_placeholders = [] if (defined(invoker.native_lib_placeholders)) { @@ -2786,16 +2709,8 @@ if (enable_java_templates) { } _script = "//build/android/gyp/apkbuilder.py" - _apksigner = "$android_sdk_build_tools/lib/apksigner.jar" - _zipalign = "$android_sdk_build_tools/zipalign" - _inputs = [ - invoker.build_config, - invoker.keystore_path, - invoker.packaged_resources_path, - _apksigner, - _zipalign, - ] + _inputs = [ invoker.packaged_resources_path ] _outputs = [ invoker.output_apk_path ] _data = [ invoker.output_apk_path ] @@ -2804,45 +2719,75 @@ if (enable_java_templates) { rebase_path(invoker.packaged_resources_path, root_build_dir) _rebased_packaged_apk_path = rebase_path(invoker.output_apk_path, root_build_dir) - _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) _args = [ "--resource-apk=$_rebased_compiled_resources_path", "--output-apk=$_rebased_packaged_apk_path", - "--assets=@FileArg($_rebased_build_config:assets)", - "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)", - "--apksigner-jar", - rebase_path(_apksigner, root_build_dir), - "--zipalign-path", - rebase_path(_zipalign, root_build_dir), - "--key-path", - rebase_path(invoker.keystore_path, root_build_dir), - "--key-name", - invoker.keystore_name, - "--key-passwd", - invoker.keystore_password, "--min-sdk-version=${invoker.min_sdk_version}", - - # TODO(mlopatkin) We are relying on the fact that build_config is an APK - # build_config. - "--java-resources=@FileArg($_rebased_build_config:java_resources_jars)", ] - if (is_official_build) { - _args += [ "--best-compression" ] + + # system_image_stub_apk does not use a build_config.json. 
+ if (defined(invoker.build_config)) { + _inputs += [ invoker.build_config ] + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + _args += [ + "--assets=@FileArg($_rebased_build_config:assets)", + "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)", + ] + if (!_is_robolectric_apk) { + _args += [ "--java-resources=@FileArg($_rebased_build_config:java_resources_jars)" ] + } } - if (defined(invoker.uncompress_dex) && invoker.uncompress_dex) { - _args += [ "--uncompress-dex" ] + if (defined(invoker.extra_assets)) { + _args += [ "--assets=${invoker.extra_assets}" ] } - if (defined(invoker.uncompress_shared_libraries) && - invoker.uncompress_shared_libraries) { - _args += [ "--uncompress-shared-libraries=True" ] + if (!_is_robolectric_apk) { + _apksigner = "$android_sdk_build_tools/lib/apksigner.jar" + _zipalign = "$android_sdk_build_tools/zipalign" + _keystore_path = android_keystore_path + _keystore_name = android_keystore_name + _keystore_password = android_keystore_password + + if (defined(invoker.keystore_path)) { + _keystore_path = invoker.keystore_path + _keystore_name = invoker.keystore_name + _keystore_password = invoker.keystore_password + } + + _inputs += [ + _apksigner, + _zipalign, + _keystore_path, + ] + _args += [ + "--apksigner-jar", + rebase_path(_apksigner, root_build_dir), + "--zipalign-path", + rebase_path(_zipalign, root_build_dir), + "--key-path", + rebase_path(_keystore_path, root_build_dir), + "--key-name", + _keystore_name, + "--key-passwd", + _keystore_password, + ] + if (is_official_build) { + _args += [ "--best-compression" ] + } + } + if (defined(invoker.uncompress_dex)) { + _uncompress_dex = invoker.uncompress_dex + } else { + # Uncompressed dex support started on Android P. + _uncompress_dex = invoker.min_sdk_version >= 28 + } + + if (_uncompress_dex) { + _args += [ "--uncompress-dex" ] } if (defined(invoker.library_always_compress)) { _args += [ "--library-always-compress=${invoker.library_always_compress}" ] } - if (defined(invoker.library_renames)) { - _args += [ "--library-renames=${invoker.library_renames}" ] - } if (defined(invoker.dex_path)) { _inputs += [ invoker.dex_path ] _args += [ @@ -2850,13 +2795,6 @@ if (enable_java_templates) { rebase_path(invoker.dex_path, root_build_dir), ] } - if (defined(invoker.jdk_libs_dex)) { - _inputs += [ invoker.jdk_libs_dex ] - _args += [ - "--jdk-libs-dex-file", - rebase_path(invoker.jdk_libs_dex, root_build_dir), - ] - } if ((defined(invoker.loadable_modules) && invoker.loadable_modules != []) || defined(invoker.native_libs_filearg) || _native_lib_placeholders != []) { @@ -2904,10 +2842,10 @@ if (enable_java_templates) { _failure_file = "$expectations_failure_dir/" + string_replace(invoker.expected_libs_and_assets, "/", "_") - inputs = [ - invoker.build_config, - invoker.expected_libs_and_assets, - ] + inputs = [ invoker.expected_libs_and_assets ] + if (defined(invoker.build_config)) { + inputs += [ invoker.build_config ] + } deps = [ invoker.build_config_dep ] outputs = [ _actual_file, @@ -2951,30 +2889,27 @@ if (enable_java_templates) { } # Compile Java source files into a .jar file, potentially using an - # annotation processor, and/or the errorprone compiler. + # annotation processor, and/or the errorprone compiler. Also includes Kotlin + # source files in the resulting info file. 
# # Note that the only way to specify custom annotation processors is # by using build_config to point to a file that corresponds to a java-related # target that includes javac:processor_classes entries (i.e. there is no # variable here that can be used for this purpose). # - # Note also the peculiar use of java_files / java_sources_file. The content - # of the java_files list and the java_sources_file file must match exactly. - # This rule uses java_files only to list the inputs to the action that - # calls compile_java.py, but will pass the list of Java source files - # with the '@${java_sources_file}" command-line syntax. Not a problem in - # practice since this is only called from java_library_impl() that sets up - # the variables properly. + # Note also the peculiar use of source_files / target_sources_file. The content + # of the source_files list and the source files in target_sources_file must + # match exactly. # # Variables: # main_target_name: Used when extracting srcjars for codesearch. - # java_files: Optional list of Java source file paths. + # source_files: Optional list of Java and Kotlin source file paths. # srcjar_deps: Optional list of .srcjar dependencies (not file paths). # The corresponding source files they contain will be compiled too. - # java_sources_file: Optional path to file containing list of Java source - # file paths. This must always be provided if java_files is not empty - # and must match it exactly. - # build_config: Path to the .build_config file of the corresponding + # target_sources_file: Optional path to file containing list of source file + # paths. This must always be provided if source_files is not empty and the + # files listed in it must match the source_files list exactly. + # build_config: Path to the .build_config.json file of the corresponding # java_library_impl() target. The following entries will be used by this # template: javac:srcjars, deps_info:javac_full_classpath, # deps_info:javac_full_interface_classpath, javac:processor_classpath, @@ -3018,7 +2953,7 @@ _srcjar_deps = [] if (defined(invoker.srcjar_deps)) { - _srcjar_deps += invoker.srcjar_deps + _srcjar_deps = invoker.srcjar_deps } _java_srcjars = [] @@ -3029,11 +2964,8 @@ } # generated_jar_path is an output when use_turbine and an input otherwise.
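+ # A sketch of the intended two-pass flow (the sub-target names are
+ # illustrative, following how java_library_impl() wires these up):
+ #   foo__header: compile_java with use_turbine = true runs the annotation
+ #     processors and emits the header jar plus generated_jar_path.
+ #   foo__compile_java: use_turbine = false consumes generated_jar_path
+ #     via _java_srcjars and produces the actual .class files.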
- if (!invoker.use_turbine && defined(invoker.generated_jar_path)) { - _annotation_processing = false + if (!invoker.use_turbine) { _java_srcjars += [ invoker.generated_jar_path ] - } else { - _annotation_processing = true } _javac_args = [] @@ -3050,7 +2982,7 @@ if (enable_java_templates) { if (target_name == "chrome_java__header") { # Regression test for: https://crbug.com/1154302 - assert_no_deps = [ "//base:base_java__impl" ] + assert_no_deps = [ "//base:base_java__compile_java" ] } depfile = "$target_gen_dir/$target_name.d" @@ -3063,9 +2995,9 @@ if (enable_java_templates) { if (!invoker.enable_errorprone && !invoker.use_turbine) { outputs += [ invoker.output_jar_path + ".info" ] } - inputs = invoker.java_files + _java_srcjars + [ _build_config ] - if (invoker.java_files != []) { - inputs += [ invoker.java_sources_file ] + inputs = invoker.source_files + _java_srcjars + [ _build_config ] + if (invoker.source_files != []) { + inputs += [ invoker.target_sources_file ] } _rebased_build_config = rebase_path(_build_config, root_build_dir) @@ -3081,8 +3013,15 @@ if (enable_java_templates) { "--generated-dir=$_rebased_generated_dir", "--jar-path=$_rebased_output_jar_path", "--java-srcjars=$_rebased_java_srcjars", + "--target-name", + get_label_info(":${target_name}", "label_no_toolchain"), ] + # SDK jar must be first on classpath. + if (invoker.include_android_sdk) { + args += [ "--classpath=@FileArg($_rebased_build_config:android:sdk_interface_jars)" ] + } + if (defined(invoker.header_jar_path)) { inputs += [ invoker.header_jar_path ] args += [ @@ -3094,6 +3033,16 @@ if (enable_java_templates) { args += [ "--classpath=$_header_jar_classpath" ] } + if (defined(invoker.kotlin_jar_path)) { + inputs += [ invoker.kotlin_jar_path ] + _rebased_kotlin_jar_path = + rebase_path(invoker.kotlin_jar_path, root_build_dir) + args += [ + "--kotlin-jar-path=$_rebased_kotlin_jar_path", + "--classpath=$_rebased_kotlin_jar_path", + ] + } + if (invoker.use_turbine) { # Prefer direct deps for turbine as much as possible. args += [ "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)" ] @@ -3101,7 +3050,7 @@ if (enable_java_templates) { args += [ "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)" ] } - if (_annotation_processing) { + if (invoker.use_turbine) { args += [ "--processorpath=@FileArg($_rebased_build_config:javac:processor_classpath)", "--processors=@FileArg($_rebased_build_config:javac:processor_classes)", @@ -3120,10 +3069,6 @@ if (enable_java_templates) { ] } - # Currently turbine does not support JDK11. 
- if (invoker.supports_android || invoker.use_turbine) { - args += [ "--java-version=1.8" ] - } if (use_java_goma) { args += [ "--gomacc-path=$goma_dir/gomacc" ] @@ -3136,9 +3081,6 @@ if (enable_java_templates) { if (enable_kythe_annotations && !invoker.enable_errorprone) { args += [ "--enable-kythe-annotations" ] } - if (invoker.requires_android) { - args += [ "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_interface_jars)" ] - } if (_chromium_code) { args += [ "--chromium-code=1" ] if (treat_warnings_as_errors) { @@ -3156,10 +3098,9 @@ if (enable_java_templates) { _dep_gen_dir = get_label_info(_errorprone_dep, "target_gen_dir") _dep_name = get_label_info(_errorprone_dep, "name") _rebased_errorprone_buildconfig = - rebase_path("$_dep_gen_dir/$_dep_name.build_config", root_build_dir) + rebase_path("$_dep_gen_dir/$_dep_name.build_config.json", + root_build_dir) args += [ - "--target-name", - get_label_info(target_name, "label_no_toolchain"), "--processorpath=@FileArg($_rebased_errorprone_buildconfig:deps_info:host_classpath)", "--enable-errorprone", ] @@ -3167,6 +3108,8 @@ if (enable_java_templates) { if (defined(invoker.skip_build_server) && invoker.skip_build_server) { # Nocompile tests need lint to fail through ninja. args += [ "--skip-build-server" ] + } else if (android_static_analysis == "build_server") { + args += [ "--use-build-server" ] } foreach(e, _processor_args) { @@ -3180,8 +3123,9 @@ if (enable_java_templates) { [ "--additional-jar-file=" + rebase_path(file_tuple[0], root_build_dir) + ":" + file_tuple[1] ] } - if (invoker.java_files != []) { - args += [ "@" + rebase_path(invoker.java_sources_file, root_build_dir) ] + if (invoker.source_files != []) { + args += + [ "@" + rebase_path(invoker.target_sources_file, root_build_dir) ] } foreach(e, _javac_args) { args += [ "--javac-arg=" + e ] @@ -3189,26 +3133,92 @@ if (enable_java_templates) { } } - template("java_lib_group") { - forward_variables_from(invoker, [ "testonly" ]) - _group_name = invoker.group_name - not_needed([ "_group_name" ]) - group(target_name) { + # Compile Kotlin source files into .class files and store them in a .jar. + # This explicitly does not run annotation processing on the Kotlin files. + # Java files and srcjars are also passed to kotlinc for reference, although + # no .class files will be generated for any Java files. A subsequent call to + # javac will be required to actually compile Java files into .class files. + # + # This action also creates a "header" .jar file for the Kotlin source files. + # It is similar to using turbine to create headers for Java files, but since + # turbine does not support Kotlin files, this is done via a plugin for + # kotlinc instead, at the same time as compilation (whereas turbine is run as + # a separate action before javac compilation). 
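+ #
+ # Hedged usage sketch (names are hypothetical; in the tree this is expected
+ # to be instantiated by java_library_impl() rather than called directly):
+ #
+ #   compile_kt("foo__compile_kt") {
+ #     main_target_name = "foo"
+ #     build_config = "$target_gen_dir/foo.build_config.json"
+ #     chromium_code = true
+ #     include_android_sdk = true
+ #     source_files = [ "java/src/org/chromium/foo/Foo.kt" ]
+ #     target_sources_file = "$target_gen_dir/foo.sources"
+ #     output_jar_path = "$target_out_dir/foo.kotlinc.jar"
+ #     output_interface_jar_path = "$target_out_dir/foo.kt-ijar.jar"
+ #   }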
+ template("compile_kt") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + _build_config = invoker.build_config + _chromium_code = invoker.chromium_code + + _srcjar_deps = [] + if (defined(invoker.srcjar_deps)) { + _srcjar_deps = invoker.srcjar_deps + } + + _java_srcjars = [] + foreach(dep, _srcjar_deps) { + _dep_gen_dir = get_label_info(dep, "target_gen_dir") + _dep_name = get_label_info(dep, "name") + _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ] + } + + action_with_pydeps(target_name) { + script = "//build/android/gyp/compile_kt.py" + depfile = "$target_gen_dir/$target_name.d" + deps = _srcjar_deps if (defined(invoker.deps)) { - deps = [] - foreach(_dep, invoker.deps) { - _target_label = get_label_info(_dep, "label_no_toolchain") - if (filter_exclude([ _target_label ], _java_library_patterns) == [] && - filter_exclude([ _target_label ], _java_resource_patterns) != - []) { - # This is a java library dep, so replace it. - deps += [ "${_target_label}__${_group_name}" ] - } else { - # Transitive java group targets should also include direct deps. - deps += [ _dep ] - } + deps += invoker.deps + } + + outputs = [ + invoker.output_jar_path, + invoker.output_interface_jar_path, + ] + inputs = invoker.source_files + _java_srcjars + [ + _build_config, + invoker.target_sources_file, + ] + + _rebased_build_config = rebase_path(_build_config, root_build_dir) + _rebased_output_jar_path = + rebase_path(invoker.output_jar_path, root_build_dir) + _rebased_output_interface_jar_path = + rebase_path(invoker.output_interface_jar_path, root_build_dir) + _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir) + _rebased_depfile = rebase_path(depfile, root_build_dir) + _rebased_generated_dir = rebase_path( + "$target_gen_dir/${invoker.main_target_name}/generated_java", + root_build_dir) + args = [ + "--depfile=$_rebased_depfile", + "--generated-dir=$_rebased_generated_dir", + "--jar-path=$_rebased_output_jar_path", + "--interface-jar-path=$_rebased_output_interface_jar_path", + "--java-srcjars=$_rebased_java_srcjars", + ] + + # SDK jar must be first on classpath. + if (invoker.include_android_sdk) { + args += [ "--classpath=@FileArg($_rebased_build_config:android:sdk_interface_jars)" ] + } + + args += [ "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)" ] + + if (use_java_goma) { + args += [ "--gomacc-path=$goma_dir/gomacc" ] + + # Override the default action_pool when goma is enabled. + pool = "//build/config/android:goma_javac_pool" + } + + if (_chromium_code) { + args += [ "--chromium-code" ] + if (treat_warnings_as_errors) { + args += [ "--warnings-as-errors" ] } } + + args += [ "@" + rebase_path(invoker.target_sources_file, root_build_dir) ] } } @@ -3263,7 +3273,7 @@ if (enable_java_templates) { # # Variables: # type: type of Java target, valid values: 'java_library', 'java_binary', - # 'junit_binary', 'java_annotation_processor', and 'android_apk' + # 'robolectric_binary', 'java_annotation_processor', and 'android_apk' # main_target_name: optional. If provided, overrides target_name when # creating sub-targets (e.g. "${main_target_name}__dex") and # some output files (e.g. "${main_target_name}.sources"). Only used @@ -3271,21 +3281,21 @@ if (enable_java_templates) { # be the name of the main APK target. # supports_android: Optional. True if target can run on Android. # requires_android: Optional. True if target can only run on Android. - # java_files: Optional list of Java source file paths for this target. 
+ # source_files: Optional list of Java source file paths for this target. # javac_args: Optional list of extra arguments to pass to javac. # errorprone_args: Optional list of extra arguments to pass to errorprone. # srcjar_deps: Optional list of .srcjar targets (not file paths). The Java # source files they contain will also be compiled for this target. - # java_sources_file: Optional path to a file which will be written with - # the content of java_files. If not provided, the file will be written + # target_sources_file: Optional path to a file which will be written with + # the content of source_files. If not provided, the file will be written # under $target_gen_dir/$main_target_name.sources. Ignored if - # java_files is empty. If not + # source_files is empty. # jar_path: Optional path to a prebuilt .jar file for this target. # Mutually exclusive with source_files and srcjar_deps. # output_name: Optional output name for the final jar path. Used to # determine the name of the final jar. Default is to use the same # name as jar_path, if provided, or main_target_name. - # main_class: Main Java class name for 'java_binary', 'junit_binary' and + # main_class: Main Java class name for 'java_binary', 'robolectric_binary' and # 'java_annotation_processor' target types. Should not be set for other # ones. # deps: Dependencies for this target. @@ -3310,18 +3320,12 @@ # input_jars_paths: Optional list of additional .jar file paths, which will # be added to the compile-time classpath when building this target (but # not to the runtime classpath). - # desugar_jars_paths: Optional list of additional .jar file paths, which will - # be added to the desugar classpath when building this target (but not to - # any other classpath). This is only used to break dependency cycles. # gradle_treat_as_prebuilt: Cause generate_gradle.py to reference this # library via its built .jar rather than including its .java sources. # proguard_enabled: Optional. True to enable ProGuard obfuscation. # proguard_configs: Optional list of additional proguard config file paths. - # bypass_platform_checks: Optional. If True, platform checks will not - # be performed. They are used to verify that every target with - # requires_android only depends on targets that, at least supports_android. - # Similarly, if a target has !supports_android, then it cannot depend on - # any other target that has requires_android. + # is_robolectric: Optional. If True, this is a host-side Android test binary + # which is allowed to depend on other Android targets. # include_java_resources: Optional. If True, include Java (not Android) # resources into final .jar file. # jar_excluded_patterns: Optional list of .class file patterns to exclude @@ -3357,8 +3361,6 @@ # be stored in the APK. # secondary_abi_loadable_modules: Optional list of native libraries for # secondary ABI. - # uncompress_shared_libraries: Optional. True to store native shared - # libraries uncompressed and page-aligned. # proto_resources_path: The path of a zip archive containing the APK's # resources compiled to the protocol buffer format (instead of regular # binary xml + resources.arsc). # @@ -3370,7 +3372,7 @@ # list of string resources to keep in the base split APK for any bundle # that uses this target. # - # For 'java_binary' and 'junit_binary' targets only. Ignored by others: + # For 'java_binary' and 'robolectric_binary' targets only.
Ignored by others: # # wrapper_script_name: Optional name for the generated wrapper script. # Default is main target name. @@ -3383,24 +3385,28 @@ if (enable_java_templates) { forward_variables_from(invoker, [ "testonly" ]) _is_prebuilt = defined(invoker.jar_path) - _is_annotation_processor = invoker.type == "java_annotation_processor" - _is_java_binary = - invoker.type == "java_binary" || invoker.type == "junit_binary" + _type = invoker.type + _is_annotation_processor = _type == "java_annotation_processor" + _is_java_binary = _type == "java_binary" || _type == "robolectric_binary" + _is_library = _type == "java_library" _supports_android = defined(invoker.supports_android) && invoker.supports_android _requires_android = defined(invoker.requires_android) && invoker.requires_android + _supports_host = !_requires_android + if (_is_java_binary || _is_annotation_processor) { + assert(!_requires_android && !_supports_android) + } + + _bypass_platform_checks = defined(invoker.bypass_platform_checks) && + invoker.bypass_platform_checks + _is_robolectric = defined(invoker.is_robolectric) && invoker.is_robolectric _invoker_deps = [] if (defined(invoker.deps)) { _invoker_deps += invoker.deps } if (defined(invoker.public_deps)) { - foreach(_public_dep, invoker.public_deps) { - if (filter_include([ _public_dep ], _invoker_deps) != []) { - assert(false, "'public_deps' and 'deps' overlap: $_public_dep") - } - } _invoker_deps += invoker.public_deps } @@ -3409,19 +3415,16 @@ if (enable_java_templates) { _main_target_name = invoker.main_target_name } - if (defined(invoker.resources_package)) { - _resources_package = invoker.resources_package - } - - _java_files = [] + _source_files = [] if (defined(invoker.sources)) { - _java_files = invoker.sources + _source_files = invoker.sources } + _srcjar_deps = [] if (defined(invoker.srcjar_deps)) { _srcjar_deps = invoker.srcjar_deps } - _has_sources = _java_files != [] || _srcjar_deps != [] + _has_sources = _source_files != [] || _srcjar_deps != [] if (_is_prebuilt) { assert(!_has_sources) @@ -3432,14 +3435,13 @@ if (enable_java_templates) { } if (_is_java_binary) { - assert(defined(invoker.main_class), - "${invoker.type}() must set main_class") + assert(defined(invoker.main_class), "${_type}() must set main_class") } else if (_is_annotation_processor) { assert(defined(invoker.main_class), "java_annotation_processor() must set main_class") } else { assert(!defined(invoker.main_class), - "main_class cannot be used for target of type ${invoker.type}") + "main_class cannot be used for target of type ${_type}") } if (defined(invoker.chromium_code)) { @@ -3449,16 +3451,16 @@ if (enable_java_templates) { _chromium_code = filter_exclude([ get_label_info(":$_main_target_name", "dir") ], [ "*\bthird_party\b*" ]) != [] - if (!_chromium_code && !_is_prebuilt && _java_files != []) { + if (!_chromium_code && !_is_prebuilt && _source_files != []) { # Unless third_party code has an org.chromium file in it. _chromium_code = - filter_exclude(_java_files, [ "*\bchromium\b*" ]) != _java_files + filter_exclude(_source_files, [ "*\bchromium\b*" ]) != _source_files } } # Define build_config_deps which will be a list of targets required to # build the _build_config. 
- _build_config = "$target_gen_dir/$_main_target_name.build_config" + _build_config = "$target_gen_dir/$_main_target_name.build_config.json" _build_config_target_name = "${_main_target_name}$build_config_target_suffix" @@ -3468,16 +3470,25 @@ if (enable_java_templates) { if (_is_prebuilt || _has_sources) { if (defined(invoker.output_name)) { _output_name = invoker.output_name - } else if (_is_prebuilt) { - _output_name = get_path_info(invoker.jar_path, "name") } else { _output_name = _main_target_name } - _build_host_jar = _is_java_binary || _is_annotation_processor || - invoker.type == "java_library" - _build_device_jar = - invoker.type != "system_java_library" && _supports_android + _build_host_jar = + _is_java_binary || _is_annotation_processor || _type == "java_library" + _build_device_jar = _type != "system_java_library" && _supports_android + + _jacoco_instrument = + use_jacoco_coverage && _chromium_code && _source_files != [] && + _build_device_jar && (!defined(invoker.testonly) || !invoker.testonly) + if (defined(invoker.jacoco_never_instrument)) { + _jacoco_instrument = + !invoker.jacoco_never_instrument && _jacoco_instrument + } + if (_jacoco_instrument) { + _invoker_deps += [ _jacoco_dep ] + } + if (_build_host_jar) { # Jar files can be needed at runtime (by Robolectric tests or java binaries), # so do not put them under obj/. @@ -3488,16 +3499,27 @@ if (enable_java_templates) { "$root_out_dir/lib.java$_target_dir_name/$_output_name.jar" } if (_build_device_jar) { - _device_processed_jar_path = - "$target_out_dir/$_output_name.processed.jar" _dex_path = "$target_out_dir/$_main_target_name.dex.jar" _enable_desugar = !defined(invoker.enable_desugar) || invoker.enable_desugar + + # Build speed optimization: Skip "process device" step if the step + # would be just a copy and avoid the copy. + _process_device_jar = + defined(invoker.bytecode_rewriter_target) || _jacoco_instrument || + defined(invoker.jar_excluded_patterns) || + defined(invoker.jar_included_patterns) + if (!_process_device_jar && _is_prebuilt) { + _device_processed_jar_path = invoker.jar_path + } else { + _device_processed_jar_path = + "$target_out_dir/$_output_name.processed.jar" + } } # For static libraries, the javac jar output is created at the intermediate # path so that it can be processed by another target and moved to the final - # spot that the .build_config knows about. Technically this should be done + # spot that the .build_config.json knows about. Technically this should be done # for the ijar as well, but this is only used for APK targets where # the ijar path isn't actually used. 
if (_has_sources) { @@ -3507,7 +3529,11 @@ if (enable_java_templates) { } if (_has_sources) { - _javac_jar_path = "$target_out_dir/$_main_target_name.javac.jar" + if (_build_device_jar && !_process_device_jar) { + _javac_jar_path = _device_processed_jar_path + } else { + _javac_jar_path = "$target_out_dir/$_main_target_name.javac.jar" + } _generated_jar_path = "$target_gen_dir/$_main_target_name.generated.srcjar" } @@ -3519,87 +3545,89 @@ if (enable_java_templates) { } } - if (_is_prebuilt || _has_sources) { - _java_res_deps = [] - _java_header_deps = [] - _java_impl_deps = [] - _non_java_deps = [] - foreach(_dep, _invoker_deps) { - _target_label = get_label_info(_dep, "label_no_toolchain") - if (filter_exclude([ _target_label ], _java_resource_patterns) == []) { - _java_res_deps += [ _dep ] - } else if (filter_exclude([ _target_label ], _java_library_patterns) == - []) { - # This is a java library dep, so it has header and impl targets. - _java_header_deps += [ "${_target_label}__header" ] - _java_impl_deps += [ "${_target_label}__impl" ] - } else { - _non_java_deps += [ _dep ] - } - } + _java_assetres_deps = filter_include(_invoker_deps, java_resource_patterns) - # Don't need to depend on the apk-under-test to be packaged. - if (defined(invoker.apk_under_test)) { - _java_header_deps += [ "${invoker.apk_under_test}__java__header" ] - _java_impl_deps += [ "${invoker.apk_under_test}__java__impl" ] - } + # Cannot use minus operator because it does not work when the operand has + # repeated entries. + _invoker_deps_minus_assetres = + filter_exclude(_invoker_deps, _java_assetres_deps) + _lib_deps = + filter_include(_invoker_deps_minus_assetres, java_library_patterns) + _non_java_deps = filter_exclude(_invoker_deps_minus_assetres, _lib_deps) - # These deps cannot be passed via invoker.deps since bundle_module targets - # have bundle_module.build_config without the __java suffix, so they are - # special and cannot be passed as regular deps to write_build_config. - if (defined(invoker.base_module_target)) { - _java_header_deps += [ "${invoker.base_module_target}__java__header" ] - _java_impl_deps += [ "${invoker.base_module_target}__java__impl" ] - } + _java_header_deps = [] # Turbine / ijar - _extra_java_deps = [] - _jacoco_instrument = - use_jacoco_coverage && _chromium_code && _java_files != [] && - _build_device_jar && (!defined(invoker.testonly) || !invoker.testonly) - if (defined(invoker.jacoco_never_instrument)) { - _jacoco_instrument = - !invoker.jacoco_never_instrument && _jacoco_instrument - } - if (_jacoco_instrument) { - _extra_java_deps += [ "//third_party/jacoco:jacocoagent_java" ] - } + # It would be more ideal to split this into __host and __javac, but we + # combine the two concepts to save on a group() target. + _java_host_deps = [] # Processed host .jar + javac .jar. + _java_validate_deps = [] # Bytecode checker & errorprone. + + foreach(_lib_dep, _lib_deps) { + # Expand //foo/java -> //foo/java:java + _lib_dep = get_label_info(_lib_dep, "label_no_toolchain") + _java_assetres_deps += [ "${_lib_dep}__assetres" ] + _java_header_deps += [ "${_lib_dep}__header" ] + _java_host_deps += [ "${_lib_dep}__host" ] + _java_validate_deps += [ "${_lib_dep}__validate" ] + } + + # APK and base module targets are special because: + # 1) They do not follow java target naming scheme (since they are not + # generally deps, there is no need for them to). + # 2) They do not bother to define a __host target. 
+ # Since __host is used as an indirect dep for the compile_java artifacts, + # add the __compile_java target directly for them. + if (defined(invoker.apk_under_test)) { + _java_assetres_deps += [ "${invoker.apk_under_test}__java__assetres" ] + _java_header_deps += [ "${invoker.apk_under_test}__java__header" ] + _java_validate_deps += [ "${invoker.apk_under_test}__java__validate" ] + _java_host_deps += [ "${invoker.apk_under_test}__compile_java" ] + } + if (defined(invoker.base_module_target)) { + _java_assetres_deps += [ "${invoker.base_module_target}__java__assetres" ] + _java_header_deps += [ "${invoker.base_module_target}__java__header" ] + _java_validate_deps += [ "${invoker.base_module_target}__java__validate" ] + _java_host_deps += [ "${invoker.base_module_target}__compile_java" ] + } + + not_needed([ "_non_java_deps" ]) + + if (_is_prebuilt || _has_sources) { + # Classpath deps are used for header and dex targets; they do not need + # __assetres deps. + # _non_java_deps are needed for input_jars_paths that are generated. + _header_classpath_deps = + _java_header_deps + _non_java_deps + [ ":$_build_config_target_name" ] + + _javac_classpath_deps = + _java_host_deps + _non_java_deps + [ ":$_build_config_target_name" ] _include_android_sdk = _build_device_jar if (defined(invoker.include_android_sdk)) { _include_android_sdk = invoker.include_android_sdk } if (_include_android_sdk) { - _sdk_java_dep = "//third_party/android_sdk:android_sdk_java" if (defined(invoker.alternative_android_sdk_dep)) { - _sdk_java_dep = invoker.alternative_android_sdk_dep + _android_sdk_dep = invoker.alternative_android_sdk_dep + } else { + _android_sdk_dep = default_android_sdk_dep } - # This is an android_system_java_prebuilt target, so no headers. - _extra_java_deps += [ _sdk_java_dep ] + _header_classpath_deps += [ "${_android_sdk_dep}__header" ] + _javac_classpath_deps += [ "${_android_sdk_dep}" ] } - - # Classpath deps is used for header and dex targets, they do not need - # resource deps. - _classpath_deps = _java_header_deps + _non_java_deps + _extra_java_deps + - [ ":$_build_config_target_name" ] - - _full_classpath_deps = - _java_impl_deps + _java_res_deps + _non_java_deps + _extra_java_deps + - [ ":$_build_config_target_name" ] } # Often needed, but too hard to figure out when ahead of time.
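+    # (Illustrative, hypothetical labels: for deps of
+    # [ "//a:a_java", "//r:r_resources", "//n:native" ], the partitioning
+    # above would place //r:r_resources in _java_assetres_deps, expand
+    # //a:a_java into its __assetres/__header/__host/__validate subtargets,
+    # and leave //n:native in _non_java_deps, so _header_classpath_deps would
+    # hold //a:a_java__header plus //n:native. The actual matches depend on
+    # java_library_patterns and java_resource_patterns.)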
not_needed([ - "_classpath_deps", - "_full_classpath_deps", + "_header_classpath_deps", + "_javac_classpath_deps", ]) - if (_java_files != []) { - _java_sources_file = "$target_gen_dir/$_main_target_name.sources" - if (defined(invoker.java_sources_file)) { - _java_sources_file = invoker.java_sources_file - } - write_file(_java_sources_file, rebase_path(_java_files, root_build_dir)) + if (_source_files != []) { + _target_sources_file = "$target_gen_dir/$_main_target_name.sources" + write_file(_target_sources_file, + rebase_path(_source_files, root_build_dir)) } write_build_config(_build_config_target_name) { @@ -3610,21 +3638,28 @@ if (enable_java_templates) { "base_allowlist_rtxt_path", "gradle_treat_as_prebuilt", "input_jars_paths", + "preferred_dep", "low_classpath_priority", "main_class", + "mergeable_android_manifests", + "module_name", + "parent_module_target", "proguard_configs", "proguard_enabled", "proguard_mapping_path", "public_target_label", "r_text_path", "type", + "version_code", + "version_name", ]) - if (type == "android_apk" || type == "android_app_bundle_module") { + if (_type == "android_apk" || _type == "android_app_bundle_module") { forward_variables_from( invoker, [ "android_manifest", "android_manifest_dep", + "merged_android_manifest", "final_dex_path", "loadable_modules", "native_lib_placeholders", @@ -3633,13 +3668,10 @@ if (enable_java_templates) { "secondary_abi_shared_libraries_runtime_deps_file", "secondary_native_lib_placeholders", "shared_libraries_runtime_deps_file", - "static_library_dependent_targets", - "uncompress_shared_libraries", "library_always_compress", - "library_renames", ]) } - if (type == "android_apk") { + if (_type == "android_apk") { forward_variables_from(invoker, [ "apk_path", @@ -3648,15 +3680,13 @@ if (enable_java_templates) { "incremental_install_json_path", ]) } - if (type == "android_app_bundle_module") { + if (_type == "android_app_bundle_module") { forward_variables_from(invoker, [ + "add_view_trace_events", "base_module_target", - "is_base_module", "module_pathmap_path", "proto_resources_path", - "version_name", - "version_code", ]) } chromium_code = _chromium_code @@ -3665,25 +3695,30 @@ if (enable_java_templates) { # Specifically avoid passing in invoker.base_module_target as one of the # possible_config_deps. 
- possible_config_deps = _invoker_deps - if (defined(_extra_java_deps)) { - possible_config_deps += _extra_java_deps + possible_config_deps = [] + if (defined(invoker.deps)) { + possible_config_deps = invoker.deps + } + if (defined(invoker.public_deps)) { + possible_config_public_deps = invoker.public_deps } if (defined(apk_under_test)) { possible_config_deps += [ apk_under_test ] } - - if (defined(invoker.public_deps)) { - possible_config_public_deps = invoker.public_deps + if (defined(_jacoco_instrument) && _jacoco_instrument) { + possible_config_deps += [ _jacoco_dep ] + } + if (defined(_android_sdk_dep)) { + possible_config_deps += [ _android_sdk_dep ] } supports_android = _supports_android requires_android = _requires_android - bypass_platform_checks = defined(invoker.bypass_platform_checks) && - invoker.bypass_platform_checks + is_robolectric = _is_robolectric + bypass_platform_checks = _bypass_platform_checks - if (defined(_resources_package)) { - custom_package = _resources_package + if (defined(invoker.resources_package)) { + custom_package = invoker.resources_package } if (_is_prebuilt || _has_sources) { ijar_path = _final_ijar_path @@ -3696,8 +3731,8 @@ device_jar_path = _device_processed_jar_path dex_path = _dex_path } - if (_java_files != []) { - java_sources_file = _java_sources_file + if (_source_files != []) { + target_sources_file = _target_sources_file } bundled_srcjars = [] @@ -3722,9 +3757,10 @@ _header_target_name = "${target_name}__header" } - _public_deps = [] - _analysis_public_deps = [] if (_has_sources) { + _kt_files = filter_include(_source_files, [ "*.kt" ]) + _java_files = filter_exclude(_source_files, [ "*.kt" ]) + if (defined(invoker.enable_errorprone)) { _enable_errorprone = invoker.enable_errorprone } else { @@ -3732,31 +3768,56 @@ _java_files != [] && _chromium_code && use_errorprone_java_compiler } - _type = invoker.type - - _uses_fake_rjava = _type == "java_library" && _requires_android + if (defined(invoker.resources_package) && _type == "java_library") { + # TODO(crbug.com/1296632): remove _bypass_platform_checks from the list + # once all robolectric targets have migrated to robolectric_library. + assert(_requires_android || _bypass_platform_checks || _is_robolectric, + "Setting resources_package is applicable only for " + + "android_library() or robolectric_library(). " + + "Target=$target_name") - if (_uses_fake_rjava && defined(_resources_package)) { - # has _resources at the end so it looks like a resources pattern, since - # it does act like one (and other resources patterns need to depend on - # this before they can read its output R.txt). - _fake_rjava_target = "${target_name}__rjava_resources" - _possible_resource_deps = _invoker_deps + # Serves a double purpose: generating R.java, as well as being the + # __assetres target (instead of using a separate group).
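+        # (Illustrative: with a hypothetical resources_package of
+        # "org.chromium.foo", the generate_r_java() step below emits a srcjar
+        # defining org.chromium.foo.R for this target's sources to compile
+        # against.)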
+ _fake_rjava_target = "${target_name}__assetres" generate_r_java(_fake_rjava_target) { - deps = [ ":$_build_config_target_name" ] - if (defined(_possible_resource_deps)) { - possible_resource_deps = _possible_resource_deps - } + deps = [ ":$_build_config_target_name" ] + _java_assetres_deps + + _non_java_deps build_config = _build_config # Filepath has to be exactly this because compile_java looks for the # srcjar of srcjar_deps at this location $gen_dir/$target_name.srcjar srcjar_path = "$target_gen_dir/$target_name.srcjar" - package = _resources_package + package = invoker.resources_package } _srcjar_deps += [ ":$_fake_rjava_target" ] } + if (_kt_files != []) { + _kt_allowlist = [ + "android/java/src/org/chromium/chrome/browser/tabmodel/AsyncTabParamsManagerImpl.kt", + "webengine_shell_apk/src/org/chromium/webengine/shell/*.kt", + ] + assert(filter_exclude(_kt_files, _kt_allowlist) == [], + "Only files in the allowlist can be included for now. Feel " + + "free to remove this assert when experimenting locally.") + _compile_kt_target_name = "${_main_target_name}__compile_kt" + _kotlinc_jar_path = "$target_out_dir/$_output_name.kotlinc.jar" + _kotlin_interface_jar_path = + "$target_out_dir/$_output_name.kt-jvm-abi.jar" + compile_kt(_compile_kt_target_name) { + deps = _header_classpath_deps + output_jar_path = _kotlinc_jar_path + output_interface_jar_path = _kotlin_interface_jar_path + main_target_name = _main_target_name + build_config = _build_config + srcjar_deps = _srcjar_deps + source_files = _source_files + target_sources_file = _target_sources_file + chromium_code = _chromium_code + include_android_sdk = _is_robolectric || _requires_android + } + } + template("compile_java_helper") { _enable_errorprone = defined(invoker.enable_errorprone) && invoker.enable_errorprone @@ -3770,11 +3831,22 @@ # Filtering out generated files resulted in no files left.
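+      # (e.g. a hypothetical target whose only remaining .java files come
+      # from srcjar_deps: errorprone skips generated files, so nothing is
+      # left to analyze and a stub group() stands in for the compile step.)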
group(target_name) { not_needed(invoker, "*") + deps = _header_classpath_deps } } else { compile_java(target_name) { - forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ "deps" ]) + deps = _header_classpath_deps + if (defined(invoker.deps)) { + deps += invoker.deps + } output_jar_path = invoker.output_jar_path + if (defined(invoker.kotlin_jar_path)) { + deps += [ ":$_compile_kt_target_name" ] + kotlin_jar_path = invoker.kotlin_jar_path + } enable_errorprone = _enable_errorprone use_turbine = defined(invoker.use_turbine) && invoker.use_turbine @@ -3782,22 +3854,17 @@ if (enable_java_templates) { build_config = _build_config if (_enable_errorprone) { - java_files = _filtered_java_files + source_files = _filtered_java_files } else { - java_files = _java_files + source_files = _source_files srcjar_deps = _srcjar_deps } - if (java_files != []) { - java_sources_file = _java_sources_file + if (source_files != []) { + target_sources_file = _target_sources_file } chromium_code = _chromium_code - supports_android = _supports_android - requires_android = _requires_android - if (!defined(deps)) { - deps = [] - } - deps += _classpath_deps + include_android_sdk = _is_robolectric || _requires_android } } } @@ -3820,8 +3887,10 @@ if (enable_java_templates) { output_jar_path = _final_ijar_path generated_jar_path = _generated_jar_path deps = _annotation_processor_deps + if (_kt_files != []) { + kotlin_jar_path = _kotlin_interface_jar_path + } } - _public_deps += [ ":$_header_target_name" ] _compile_java_target = "${_main_target_name}__compile_java" compile_java_helper(_compile_java_target) { @@ -3830,6 +3899,9 @@ if (enable_java_templates) { deps = [ ":$_header_target_name" ] header_jar_path = _final_ijar_path generated_jar_path = _generated_jar_path + if (_kt_files != []) { + kotlin_jar_path = _kotlinc_jar_path + } } if (_enable_errorprone) { _compile_java_errorprone_target = "${_main_target_name}__errorprone" @@ -3843,18 +3915,23 @@ if (enable_java_templates) { javac_args += invoker.errorprone_args } deps = [ ":$_header_target_name" ] + if (_kt_files != []) { + kotlin_jar_path = _kotlinc_jar_path + } header_jar_path = _final_ijar_path generated_jar_path = _generated_jar_path output_jar_path = "$target_out_dir/$target_name.errorprone.stamp" } - _analysis_public_deps += [ ":$_compile_java_errorprone_target" ] + _java_validate_deps += [ ":$_compile_java_errorprone_target" ] } } # _has_sources if (_is_prebuilt || _build_device_jar || _build_host_jar) { - _unprocessed_jar_deps = [] if (_has_sources) { - _unprocessed_jar_deps += [ ":$_compile_java_target" ] + _unprocessed_jar_deps = [ ":$_compile_java_target" ] + } else { + # jars might be generated by a dep. + _unprocessed_jar_deps = _non_java_deps } } @@ -3890,7 +3967,7 @@ if (enable_java_templates) { "--output-jar", rebase_path(_rewritten_jar, root_build_dir), ] - deps = _unprocessed_jar_deps + _full_classpath_deps + + deps = _unprocessed_jar_deps + _javac_classpath_deps + [ invoker.bytecode_rewriter_target ] } @@ -3907,120 +3984,147 @@ if (enable_java_templates) { input_jar = _unprocessed_jar_path output_jar = _final_ijar_path - # Normally ijar does not require any deps, but: - # 1 - Some jars are bytecode rewritten by _unprocessed_jar_deps. - # 2 - Other jars need to be unzipped by _non_java_deps. - # 3 - It is expected that depending on a header target implies depending - # on its transitive header target deps via _java_header_deps. 
- deps = _unprocessed_jar_deps + _non_java_deps + _java_header_deps + # ijar needs only _unprocessed_jar_deps, but this also needs to export + # __header target from deps. + deps = _unprocessed_jar_deps + _java_header_deps } - _public_deps += [ ":$_header_target_name" ] } if (_build_host_jar || _build_device_jar) { - _process_prebuilt_target_name = "${target_name}__process" - process_java_prebuilt(_process_prebuilt_target_name) { - forward_variables_from(invoker, - [ - "jar_excluded_patterns", - "jar_included_patterns", - ]) - build_config = _build_config - build_config_dep = ":$_build_config_target_name" - input_jar_path = _unprocessed_jar_path - jar_deps = _unprocessed_jar_deps + _full_classpath_deps - if (_build_host_jar) { - host_jar_path = _host_processed_jar_path - } - if (_build_device_jar) { - device_jar_path = _device_processed_jar_path - jacoco_instrument = _jacoco_instrument - if (_jacoco_instrument) { - java_files = _java_files - java_sources_file = _java_sources_file - } - enable_desugar = _enable_desugar && enable_bazel_desugar - if (enable_desugar) { - classpath_deps = _classpath_deps - forward_variables_from(invoker, [ "desugar_jars_paths" ]) - } - } - - # proguard_configs listed on java_library targets need to be marked - # as inputs to at least one action so that "gn analyze" will know - # about them. Although ijar doesn't use them, it's a convenient spot - # to list them. - # https://crbug.com/827197 - if (defined(invoker.proguard_configs)) { - inputs = invoker.proguard_configs - input_deps = _non_java_deps + _srcjar_deps # For the aapt-generated - # proguard rules. - } - } - if (_build_host_jar) { - _public_deps += [ ":${_process_prebuilt_target_name}_host" ] - } - if (_build_device_jar) { - _public_deps += [ ":${_process_prebuilt_target_name}_device" ] - } - - _enable_bytecode_checks = !defined(invoker.enable_bytecode_checks) || - invoker.enable_bytecode_checks + _enable_bytecode_checks = + (!defined(invoker.enable_bytecode_checks) || + invoker.enable_bytecode_checks) && android_static_analysis != "off" if (_enable_bytecode_checks) { - _bytecode_checks_target = "${target_name}__validate_classpath" - bytecode_processor(_bytecode_checks_target) { + _validate_target_name = "${target_name}__validate" + bytecode_processor(_validate_target_name) { forward_variables_from(invoker, [ "missing_classes_allowlist" ]) - deps = _unprocessed_jar_deps + _full_classpath_deps + + deps = _unprocessed_jar_deps + _javac_classpath_deps + [ ":$_build_config_target_name" ] - requires_android = _requires_android + data_deps = _java_validate_deps + if (defined(_compile_java_errorprone_target)) { + data_deps += [ ":$_compile_java_errorprone_target" ] + } + + include_android_sdk = _requires_android || _is_robolectric target_label = get_label_info(":${invoker.target_name}", "label_no_toolchain") input_jar = _unprocessed_jar_path build_config = _build_config is_prebuilt = _is_prebuilt } - _analysis_public_deps += [ ":$_bytecode_checks_target" ] + } else { + not_needed(invoker, [ "missing_classes_allowlist" ]) } - } - if (_build_device_jar) { - dex("${target_name}__dex") { - forward_variables_from(invoker, - [ - "desugar_jars_paths", - "proguard_enable_obfuscation", - ]) - input_class_jars = [ _device_processed_jar_path ] - enable_desugar = _enable_desugar - ignore_desugar_missing_deps = !_enable_bytecode_checks - - # There's no value in per-class dexing prebuilts since they never - # change just one class at a time. 
- disable_incremental = _is_prebuilt - output = _dex_path - deps = [ ":${_process_prebuilt_target_name}_device" ] - - if (enable_desugar && !enable_bazel_desugar) { - # Desugaring with D8 requires full classpath. - build_config = _build_config - final_ijar_path = _final_ijar_path - deps += _classpath_deps + [ ":$_header_target_name" ] + if (_build_host_jar) { + _process_host_jar_target_name = "${target_name}__host" + process_java_library(_process_host_jar_target_name) { + forward_variables_from(invoker, + [ + "jar_excluded_patterns", + "jar_included_patterns", + ]) + + # Robolectric tests require these to be on swarming. + data = [ _host_processed_jar_path ] + input_jar_path = _unprocessed_jar_path + deps = _unprocessed_jar_deps + _javac_classpath_deps + output_jar_path = _host_processed_jar_path + jacoco_instrument = _jacoco_instrument + if (_jacoco_instrument) { + source_files = _source_files + target_sources_file = _target_sources_file + } + + # _java_host_deps isn't necessary for process_java_library(), but is + # necessary so that this target can be used to depend on transitive + # __host targets without the need to create a separate group() + # target. This trade-off works because process_java_library is fast. + deps += _java_host_deps + + # Add runtime_deps here since robolectric_binary does not depend on the top-level group. + if (defined(invoker.data)) { + data += invoker.data + } + if (defined(invoker.data_deps)) { + data_deps = invoker.data_deps + } + } + } + + if (_build_device_jar) { + if (_process_device_jar) { + _process_device_jar_target_name = "${target_name}__process_device" + process_java_library(_process_device_jar_target_name) { + forward_variables_from(invoker, + [ + "jar_excluded_patterns", + "jar_included_patterns", + ]) + input_jar_path = _unprocessed_jar_path + + deps = _unprocessed_jar_deps + _javac_classpath_deps + output_jar_path = _device_processed_jar_path + jacoco_instrument = _jacoco_instrument + if (_jacoco_instrument) { + source_files = _source_files + target_sources_file = _target_sources_file + } + } + _process_device_jar_deps = [ ":${_process_device_jar_target_name}" ] + } else { + assert(_unprocessed_jar_path == _device_processed_jar_path) + _process_device_jar_deps = _unprocessed_jar_deps } - enable_multidex = false - is_library = true + _dex_target_name = "${target_name}__dex" + dex(_dex_target_name) { + forward_variables_from(invoker, [ "proguard_enable_obfuscation" ]) + input_class_jars = [ _device_processed_jar_path ] + enable_desugar = _enable_desugar + ignore_desugar_missing_deps = !_enable_bytecode_checks + + # There's no value in per-class dexing prebuilts since they never + # change just one class at a time. + disable_incremental = _is_prebuilt + output = _dex_path + deps = _process_device_jar_deps + + if (enable_desugar) { + # Desugaring with D8 requires full classpath. + build_config = _build_config + unprocessed_jar_path = _unprocessed_jar_path + deps += _header_classpath_deps + _unprocessed_jar_deps + } + + enable_multidex = false + is_library = true + + # proguard_configs listed on java_library targets need to be marked + # as inputs to at least one target so that "gn analyze" will know + # about them. Although this target doesn't use them, it's a convenient spot + # to list them. + # https://crbug.com/827197 + if (compute_inputs_for_analyze && defined(invoker.proguard_configs)) { + inputs = invoker.proguard_configs + + # For the aapt-generated proguard rules.
+ deps += _non_java_deps + _srcjar_deps + } + } } - _public_deps += [ ":${target_name}__dex" ] } if (_is_java_binary) { # Targets might use the generated script while building, so make it a dep # rather than a data_dep. - java_binary_script("${target_name}__java_binary_script") { + _java_binary_script_target_name = "${target_name}__java_binary_script" + java_binary_script(_java_binary_script_target_name) { forward_variables_from(invoker, [ "tiered_stop_at_level_one", "main_class", + "max_heap_size", "wrapper_script_args", ]) build_config = _build_config @@ -4029,46 +4133,116 @@ script_name = invoker.wrapper_script_name } deps = [ ":$_build_config_target_name" ] + if (_is_robolectric) { + # For robolectric tests, we also add the normal sdk jar to the + # classpath, since whenever we start using a new Android SDK, + # robolectric does not support it yet and often takes a few months + # to add support. This causes issues when mocking classes that + # reference new SDK classes, so providing our normal SDK allows + # these classes to resolve. For an example, see crbug.com/1350963. + extra_classpath_jars = [ android_sdk_jar ] + + # Mockito bug with JDK17 requires us to use JDK11 until we find a fix + # for crbug.com/1409661. + use_jdk_11 = true + } } - _public_deps += [ ":${target_name}__java_binary_script" ] } - # The __impl target contains all non-analysis steps for this template. - # Having this separated out from the main target (which contains analysis - # steps) allows analysis steps for this target to be run concurrently with - # the non-analysis steps of other targets that depend on this one. - group("${target_name}__impl") { - public_deps = _public_deps + if (!defined(_validate_target_name)) { + _validate_target_name = "${target_name}__validate" + + # Allow other targets to depend on this __validate target. + group(_validate_target_name) { + deps = _java_validate_deps + } } - java_lib_group("${target_name}__assetres") { - deps = _invoker_deps - group_name = "assetres" + if (_supports_host && !defined(_process_host_jar_target_name)) { + group("${target_name}__host") { + deps = _java_host_deps + } + } - if (defined(_fake_rjava_target)) { - deps += [ ":$_fake_rjava_target" ] + # robolectric_library can depend on java_library, so java_library must + # define __assetres. + if ((_is_library || _supports_android || _is_robolectric) && + !defined(_fake_rjava_target)) { + group("${target_name}__assetres") { + if (_supports_android || _is_robolectric) { + deps = _java_assetres_deps + } } } + # The top-level group is used: + # 1) To allow building the target explicitly via ninja, + # 2) To trigger all analysis deps, + # 3) By custom action() targets that want to use artifacts as inputs. group(target_name) { forward_variables_from(invoker, [ "assert_no_deps", "data", "data_deps", - "deps", - "public_deps", "visibility", ]) - if (!defined(public_deps)) { + if (_requires_android || (_supports_android && _is_library)) { + # For non-robolectric targets, depend on other java targets' top-level + # groups so that the __dex step gets depended on. + forward_variables_from(invoker, + [ + "deps", + "public_deps", + ]) + if (!defined(deps)) { + deps = [] + } + if (!defined(public_deps)) { + public_deps = [] + } + } else { + # For robolectric targets, depend only on non-java deps and the specific + # subtargets below, which will not include __dex.
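+        # (Illustrative: a robolectric target depending on a hypothetical
+        # //foo:foo_java reaches foo_java__host, foo_java__assetres, and
+        # foo_java__validate through these subtarget groups, but never
+        # foo_java__dex, since host-side tests have no use for dex files.)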
+ deps = _non_java_deps public_deps = [] - } - public_deps += [ ":${target_name}__impl" ] - if (defined(_analysis_public_deps)) { - if (!defined(data_deps)) { - data_deps = [] + if (defined(invoker.public_deps)) { + public_deps += + filter_exclude(invoker.public_deps, java_target_patterns) } - data_deps += _analysis_public_deps + } + if (defined(_jacoco_instrument) && _jacoco_instrument) { + deps += [ _jacoco_dep ] + } + if (defined(invoker.apk_under_test)) { + deps += [ invoker.apk_under_test ] + } + if (defined(_process_device_jar_target_name)) { + public_deps += [ ":$_process_device_jar_target_name" ] + } + if (defined(_dex_target_name)) { + public_deps += [ ":$_dex_target_name" ] + } + if (_supports_android && _is_library) { + # Robolectric targets define __assetres, but there's no need to build it + # by default. + public_deps += [ ":${target_name}__assetres" ] + } + if (_supports_host) { + # android_* targets define __host, but there's no need to build it by + # default. + public_deps += [ ":${target_name}__host" ] + } + if (_is_java_binary) { + public_deps += [ ":$_java_binary_script_target_name" ] + } + if (!defined(data_deps)) { + data_deps = [] + } + if (defined(_validate_target_name)) { + data_deps += [ ":$_validate_target_name" ] + } else { + data_deps += _java_validate_deps } } } @@ -4109,8 +4283,6 @@ template("create_android_app_bundle_module") { _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) _rebased_native_libraries_config = rebase_path(invoker.native_libraries_config, root_build_dir) - _proguard_enabled = - defined(invoker.proguard_enabled) && invoker.proguard_enabled forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) _deps = invoker.deps @@ -4121,7 +4293,7 @@ template("create_android_app_bundle_module") { # by apkbuild.py --format=bundle-module. This means not using # apksigner and zipalign as well, nor the keystore. Other # dependencies like extra native libraries are all pulled from the - # .build_config through @FileArg() references (see below) and + # .build_config.json through @FileArg() references (see below) and # will be listed in the generated depfile instead. _inputs = [ invoker.build_config, @@ -4145,10 +4317,7 @@ template("create_android_app_bundle_module") { ":native:secondary_native_library_placeholders)", "--android-abi=$android_app_abi", "--min-sdk-version=${invoker.min_sdk_version}", - "--uncompress-shared-libraries=@FileArg(" + - "$_rebased_build_config:native:uncompress_shared_libraries)", "--library-always-compress=@FileArg($_rebased_build_config:native:library_always_compress)", - "--library-renames=@FileArg($_rebased_build_config:native:library_renames)", ] if (defined(android_app_secondary_abi)) { _rebased_secondary_abi_native_libraries_config = @@ -4167,6 +4336,9 @@ template("create_android_app_bundle_module") { if (defined(invoker.uncompress_dex) && invoker.uncompress_dex) { _args += [ "--uncompress-dex" ] } + if (defined(invoker.extra_assets)) { + _args += [ "--assets=${invoker.extra_assets}" ] + } # Use either provided dex path or build config path based on type of module. if (defined(invoker.dex_path)) { @@ -4177,20 +4349,6 @@ template("create_android_app_bundle_module") { _args += [ "--dex-file=@FileArg($_rebased_build_config:final_dex:path)" ] } - # The library is imported via proguard when proguard is enabled. 
- if (!_proguard_enabled && enable_jdk_library_desugaring && - invoker.module_name == "base") { - _all_jdk_libs = "//build/android:all_jdk_libs" - _deps += [ _all_jdk_libs ] - _jdk_libs_dex = - get_label_info(_all_jdk_libs, "target_out_dir") + "/all_jdk_libs.l8.dex" - _inputs += [ _jdk_libs_dex ] - _args += [ - "--jdk-libs-dex-file", - rebase_path(_jdk_libs_dex, root_build_dir), - ] - } - if (treat_warnings_as_errors) { _args += [ "--warnings-as-errors" ] } @@ -4255,59 +4413,6 @@ template("create_android_app_bundle_module") { } } -# Splits input dex file(s) based on given feature jars into seperate dex files -# for each feature. -# -# Variables: -# proguard_mapping: Path to input proguard mapping produced by synchronized -# proguarding. -# input_dex_zip: Path to zipped dex files to split. -# all_modules: Path to list of all modules. Each Module must have -# build_config, name, and build_config_target properties. -# feature_jars_args: Optional list of args to be passed to dexsplitter.py. -# If used should include the jars owned by each feature (in the same order -# as all_modules). Allows invoker to pull the list of jars from a different -# .build_config than the module's .build_config. -template("dexsplitter") { - action_with_pydeps(target_name) { - forward_variables_from(invoker, [ "deps" ]) - script = "//build/android/gyp/dexsplitter.py" - _stamp = "${target_gen_dir}/${target_name}.stamp" - outputs = [ _stamp ] - - depfile = "${target_gen_dir}/${target_name}.d" - args = [ - "--stamp", - rebase_path(_stamp, root_build_dir), - "--depfile", - rebase_path(depfile, root_build_dir), - "--r8-path", - rebase_path(_r8_path, root_build_dir), - "--input-dex-zip", - rebase_path(invoker.input_dex_zip, root_build_dir), - "--proguard-mapping-file", - rebase_path(invoker.proguard_mapping, root_build_dir), - ] - - foreach(_feature_module, invoker.all_modules) { - _rebased_module_build_config = - rebase_path(_feature_module.build_config, root_build_dir) - args += [ - "--feature-name", - _feature_module.name, - "--dex-dest=@FileArg($_rebased_module_build_config:final_dex:path)", - ] - if (!defined(invoker.feature_jars_args)) { - args += [ "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:device_classpath)" ] - } - deps += [ _feature_module.build_config_target ] - } - if (defined(invoker.feature_jars_args)) { - args += invoker.feature_jars_args - } - } -} - # Allots native libraries depended on by feature modules to the module the # libraries should be packaged into. The packaging module may be different from # the dependee module in case a library is depended on by multiple modules. In diff --git a/build/config/android/linker_version_script.gni b/build/config/android/linker_version_script.gni index 96d8b665d1ce..864233c8c70b 100644 --- a/build/config/android/linker_version_script.gni +++ b/build/config/android/linker_version_script.gni @@ -1,7 +1,8 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
+import("//build/config/android/config.gni") import("//build/config/python.gni") # Generate a custom linker version script that can later be used with @@ -16,13 +17,17 @@ import("//build/config/python.gni") # template("generate_linker_version_script") { action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) script = "//build/android/gyp/generate_linker_version_script.py" outputs = [ invoker.linker_script ] inputs = [] args = [ "--output=" + rebase_path(invoker.linker_script, root_build_dir) ] - if (defined(invoker.export_java_symbols) && invoker.export_java_symbols) { - args += [ "--export-java-symbols" ] + if (defined(invoker.testonly) && invoker.testonly) { + args += [ "--export-fortesting-java-symbols" ] + } + if (allow_jni_multiplexing) { + args += [ "--jni-multiplexing" ] } if (defined(invoker.export_feature_registrations) && diff --git a/build/config/android/rules.gni b/build/config/android/rules.gni index b411f154af3b..a3eccbff751c 100644 --- a/build/config/android/rules.gni +++ b/build/config/android/rules.gni @@ -1,25 +1,29 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # Do not add any imports to non-//build directories here. # Some projects (e.g. V8) do not have non-build directories DEPS'ed in. - -import("//build/config/android/channel.gni") import("//build/config/android/config.gni") -import("//build/config/android/internal_rules.gni") +import("//build/config/android/copy_ex.gni") import("//build/config/clang/clang.gni") import("//build/config/compiler/compiler.gni") import("//build/config/coverage/coverage.gni") import("//build/config/python.gni") import("//build/config/rts.gni") +import("//build/config/sanitizers/sanitizers.gni") import("//build/config/zip.gni") import("//build/toolchain/toolchain.gni") - -assert(is_android) - -declare_args() { - enable_jni_tracing = false +assert(is_android || is_robolectric) + +# Use a dedicated include dir so that files can #include headers from other +# toolchains without affecting non-JNI #includes. +if (target_os == "android") { + jni_headers_dir = "$root_build_dir/gen/jni_headers" +} else { + # Chrome OS builds cannot share gen/ directories because is_android=false + # within default_toolchain. + jni_headers_dir = "$root_gen_dir/jni_headers" } if (target_cpu == "arm") { @@ -35,6 +39,9 @@ if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) { _sanitizer_runtimes = [ "$clang_base_path/lib/clang/$clang_version/lib/linux/libclang_rt.ubsan_standalone-$_sanitizer_arch-android.so" ] } +_BUNDLETOOL_JAR_PATH = + "//third_party/android_build_tools/bundletool/bundletool.jar" + # Creates a dist directory for a native executable. # # Running a native executable on a device requires all the shared library @@ -121,11 +128,15 @@ template("create_native_executable_dist") { } if (enable_java_templates) { - import("//build/config/sanitizers/sanitizers.gni") + if (is_android) { + import("//build/config/android/internal_rules.gni") + } # JNI target implementation. See generate_jni or generate_jar_jni for usage. 
template("generate_jni_impl") { - _jni_output_dir = "${target_gen_dir}/${target_name}" + _prev_jni_output_dir = "$target_gen_dir/$target_name" + _subdir = rebase_path(target_gen_dir, root_gen_dir) + _jni_output_dir = "$jni_headers_dir/$_subdir/$target_name" if (defined(invoker.jni_generator_include)) { _jni_generator_include = invoker.jni_generator_include _jni_generator_include_deps = [] @@ -157,37 +168,58 @@ if (enable_java_templates) { public_deps = [] } public_deps += _jni_generator_include_deps + inputs = [] args = [ "--ptr_type=long", + + # TODO(agrieve): --prev_output_dir used only to make incremental builds + # work. Remove --prev_output_dir at some point after 2022. + "--prev_output_dir", + rebase_path(_prev_jni_output_dir, root_build_dir), + "--output_dir", + rebase_path(_jni_output_dir, root_build_dir), "--includes", rebase_path(_jni_generator_include, _jni_output_dir), ] if (defined(invoker.classes)) { - if (defined(invoker.jar_file)) { - _jar_file = invoker.jar_file + if (is_robolectric) { + not_needed(invoker, [ "jar_file" ]) } else { - _jar_file = android_sdk_jar + if (defined(invoker.jar_file)) { + _jar_file = invoker.jar_file + } else { + _jar_file = android_sdk_jar + } + inputs += [ _jar_file ] + args += [ + "--jar_file", + rebase_path(_jar_file, root_build_dir), + ] } - inputs += [ _jar_file ] - args += [ - "--jar_file", - rebase_path(_jar_file, root_build_dir), - ] _input_args = invoker.classes _input_names = invoker.classes if (defined(invoker.always_mangle) && invoker.always_mangle) { args += [ "--always_mangle" ] } + if (defined(invoker.unchecked_exceptions) && + invoker.unchecked_exceptions) { + args += [ "--unchecked_exceptions" ] + } } else { assert(defined(invoker.sources)) inputs += invoker.sources _input_args = rebase_path(invoker.sources, root_build_dir) _input_names = invoker.sources - if (use_hashed_jni_names) { + if (!is_robolectric && use_hashed_jni_names) { args += [ "--use_proxy_hash" ] } + + if (!is_robolectric && defined(invoker.enable_jni_multiplexing) && + invoker.enable_jni_multiplexing) { + args += [ "--enable_jni_multiplexing" ] + } if (defined(invoker.namespace)) { args += [ "-n ${invoker.namespace}" ] } @@ -198,17 +230,16 @@ if (enable_java_templates) { outputs = [] foreach(_name, _input_names) { - _name_part = get_path_info(_name, "name") - outputs += [ "${_jni_output_dir}/${_name_part}_jni.h" ] - } + _name = get_path_info(_name, "name") + "_jni.h" + outputs += [ "$_jni_output_dir/$_name" ] - # Avoid passing GN lists because not all webrtc embedders use //build. - foreach(_output, outputs) { + # Avoid passing GN lists because not all webrtc embedders use //build. args += [ - "--output_file", - rebase_path(_output, root_build_dir), + "--output_name", + _name, ] } + foreach(_input, _input_args) { args += [ "--input_file=$_input" ] } @@ -216,8 +247,35 @@ if (enable_java_templates) { if (enable_profiling) { args += [ "--enable_profiling" ] } - if (enable_jni_tracing) { - args += [ "--enable_tracing" ] + if (current_toolchain != default_toolchain && target_os == "android") { + # Rather than regenerating .h files in secondary toolchains, re-use the + # ones from the primary toolchain by depending on it and adding the + # root gen directory to the include paths. + # https://crbug.com/1369398 + inputs = [] + outputs = [] + _stamp = "$target_gen_dir/$target_name.stamp" + outputs = [ _stamp ] + + # Since we used to generate the .h files rather than delegate, the + # script will delete all .h files it finds in --prev_output_dir. 
+ # TODO(agrieve): --prev_output_dir used only to make incremental builds + # work. Convert to group() target at some point after 2022. + args += [ + "--stamp", + rebase_path(_stamp, root_build_dir), + ] + deps = [] + public_deps = [] + public_deps = [ ":$target_name($default_toolchain)" ] + public_configs = + [ "//build/config/android:jni_include_dir($default_toolchain)" ] + } else { + public_configs = [ "//build/config/android:jni_include_dir" ] + if (defined(visibility)) { + # Allow dependency on ourselves from secondary toolchain. + visibility += [ ":$target_name" ] + } } } } @@ -265,6 +323,8 @@ if (enable_java_templates) { # android.jar # always_mangle: Mangle all generated method names. By default, the script # only mangles methods that cause ambiguity due to method overload. + # unchecked_exceptions: Don't CHECK() for exceptions in generated stubs. + # This behaves as if every method had @CalledByNativeUnchecked. # deps, public_deps: As normal # # Example @@ -282,7 +342,10 @@ if (enable_java_templates) { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) } } +} # enable_java_templates +# non-robolectric things +if (enable_java_templates && is_android) { # Declare a jni registration target. # # This target generates a srcjar containing a copy of GEN_JNI.java, which has @@ -297,9 +360,10 @@ if (enable_java_templates) { # about the format of the header file. # # Variables - # targets: List of .build_config supported targets to provide java sources. - # header_output: Path to the generated .h file (optional). - # sources_exclusions: List of .java files that should be skipped. (optional) + # targets: List of .build_config.json supported targets to provide java sources. + # manual_jni_registration: Manually do JNI registration - required for feature + # splits which provide their own native library. (optional) + # file_exclusions: List of .java files that should be skipped. (optional) # namespace: Registration functions will be wrapped into this. (optional) # require_native_mocks: Enforce that any native calls using # org.chromium.base.annotations.NativeMethods must have a mock set @@ -314,8 +378,8 @@ if (enable_java_templates) { # Example # generate_jni_registration("chrome_jni_registration") { # targets = [ ":chrome_public_apk" ] - # header_output = "$target_gen_dir/$target_name.h" - # sources_exclusions = [ + # manual_jni_registration = false + # file_exclusions = [ # "//path/to/Exception.java", # ] # } @@ -341,50 +405,84 @@ if (enable_java_templates) { _build_config = get_label_info("${_target}($default_toolchain)", "target_gen_dir") + "/" + get_label_info("${_target}($default_toolchain)", "name") + - ".build_config" + ".build_config.json" _rebased_build_config = rebase_path(_build_config, root_build_dir) inputs += [ _build_config ] if (defined(invoker.no_transitive_deps) && invoker.no_transitive_deps) { - args += [ "--sources-files=@FileArg($_rebased_build_config:deps_info:java_sources_file)" ] + args += [ "--sources-files=@FileArg($_rebased_build_config:deps_info:target_sources_file)" ] } else { args += [ # This is a list of .sources files. 
- "--sources-files=@FileArg($_rebased_build_config:deps_info:jni:all_source)", + "--sources-files=@FileArg($_rebased_build_config:deps_info:jni_all_source)", ] } } + if (defined(invoker.include_testonly)) { + _include_testonly = invoker.include_testonly + } else { + _include_testonly = defined(testonly) && testonly + } + if (_include_testonly) { + args += [ "--include-test-only" ] + } if (use_hashed_jni_names) { - args += [ "--use_proxy_hash" ] + args += [ "--use-proxy-hash" ] } if (defined(invoker.enable_native_mocks) && invoker.enable_native_mocks) { - args += [ "--enable_proxy_mocks" ] + args += [ "--enable-proxy-mocks" ] if (defined(invoker.require_native_mocks) && invoker.require_native_mocks) { - args += [ "--require_mocks" ] + args += [ "--require-mocks" ] } } - if (defined(invoker.header_output)) { - outputs += [ invoker.header_output ] + _manual_jni_registration = defined(invoker.manual_jni_registration) && + invoker.manual_jni_registration + _enable_jni_multiplexing = defined(invoker.enable_jni_multiplexing) && + invoker.enable_jni_multiplexing + if (_manual_jni_registration) { + args += [ "--manual-jni-registration" ] + } + if (_enable_jni_multiplexing) { + args += [ "--enable-jni-multiplexing" ] + } + + if ((!defined(invoker.prevent_header_output) || + !invoker.prevent_header_output) && + (_manual_jni_registration || _enable_jni_multiplexing)) { + assert(current_toolchain == default_toolchain, + "We do not need >1 toolchain copies of the same header.") + + _subdir = rebase_path(target_gen_dir, root_gen_dir) + _jni_header_output = + "$jni_headers_dir/$_subdir/${target_name}_generated.h" + outputs += [ _jni_header_output ] args += [ "--header-path", - rebase_path(invoker.header_output, root_build_dir), + rebase_path(_jni_header_output, root_build_dir), ] + + # This gives targets depending on this registration access to our generated header. + public_configs = [ "//build/config/android:jni_include_dir" ] } - if (defined(invoker.sources_exclusions)) { - _rebase_sources_exclusions = - rebase_path(invoker.sources_exclusions, root_build_dir) - args += [ "--sources-exclusions=$_rebase_sources_exclusions" ] + if (defined(invoker.file_exclusions)) { + _rebase_file_exclusions = + rebase_path(invoker.file_exclusions, root_build_dir) + args += [ "--file-exclusions=$_rebase_file_exclusions" ] } if (defined(invoker.namespace)) { args += [ "--namespace=${invoker.namespace}" ] } + + if (defined(invoker.module_name)) { + args += [ "--module-name=${invoker.module_name}" ] + } } } @@ -586,8 +684,8 @@ if (enable_java_templates) { # foo_features.cc: # # // A feature. - # const base::Feature kSomeFeature{"SomeFeature", - # base::FEATURE_DISABLED_BY_DEFAULT}; + # BASE_FEATURE(kSomeFeature, "SomeFeature", + # base::FEATURE_DISABLED_BY_DEFAULT); # # FooFeatures.java.tmpl # @@ -610,7 +708,11 @@ if (enable_java_templates) { # my.java.package. template("java_cpp_features") { action_with_pydeps(target_name) { - forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "sources" ]) + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "deps", + "sources", + ]) # The sources aren't compiled so don't check their dependencies. check_includes = false @@ -688,8 +790,6 @@ if (enable_java_templates) { # version_number: (Optional) String of expected version of 'main' native # library. # enable_chromium_linker: (Optional) Whether to use the Chromium linker. - # load_library_from_apk: (Optional) Whether libraries should be loaded from - # the APK without uncompressing. 
# use_final_fields: True to use final fields. When false, all other # variables must not be set. template("write_native_libraries_java") { @@ -717,14 +817,16 @@ if (enable_java_templates) { if (invoker.use_final_fields) { # Write native_libraries_list_file via depfile rather than specifying it # as a dep in order to allow R8 to run in parallel with native compilation. - depfile = "$target_gen_dir/$target_name.d" - args += [ - "--final", - "--depfile", - rebase_path(depfile, root_build_dir), - "--native-libraries-list", - rebase_path(invoker.native_libraries_list_file, root_build_dir), - ] + args += [ "--final" ] + if (defined(invoker.native_libraries_list_file)) { + depfile = "$target_gen_dir/$target_name.d" + args += [ + "--native-libraries-list", + rebase_path(invoker.native_libraries_list_file, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + ] + } if (defined(invoker.main_component_library)) { args += [ "--main-component-library", @@ -735,13 +837,6 @@ invoker.enable_chromium_linker) { args += [ "--enable-chromium-linker" ] } - if (defined(invoker.load_library_from_apk) && - invoker.load_library_from_apk) { - args += [ "--load-library-from-apk" ] - } - if (defined(invoker.use_modern_linker) && invoker.use_modern_linker) { - args += [ "--use-modern-linker" ] - } } } } @@ -788,7 +883,7 @@ # template("android_generated_resources") { forward_variables_from(invoker, [ "testonly" ]) - _build_config = "$target_gen_dir/${target_name}.build_config" + _build_config = "$target_gen_dir/${target_name}.build_config.json" _rtxt_out_path = "$target_gen_dir/${target_name}.R.txt" write_build_config("$target_name$build_config_target_suffix") { forward_variables_from(invoker, [ "resource_overlay" ]) @@ -954,6 +1049,8 @@ # merged into apks that directly or indirectly depend on this target. # android_manifest_dep: Target that generates AndroidManifest (if applicable) # custom_package: java package for generated .java files. + # allow_missing_resources: Do not fail if a resource exists in a directory + # but is not listed in sources. # shared_resources: If true make a resource package that can be loaded by a # different application at runtime to access the package's resources. # resource_overlay: Whether the resources in 'sources' should override @@ -996,7 +1093,7 @@ _resources_zip = "$target_out_dir/$target_name.resources.zip" _r_text_out_path = _base_path + "_R.txt" - _build_config = _base_path + ".build_config" + _build_config = _base_path + ".build_config.json" _build_config_target_name = "$target_name$build_config_target_suffix" _deps = [] @@ -1005,9 +1102,9 @@ } if (defined(invoker.alternative_android_sdk_dep)) { - _deps += [ invoker.alternative_android_sdk_dep ] + _android_sdk_dep = invoker.alternative_android_sdk_dep } else { - _deps += [ "//third_party/android_sdk:android_sdk_java" ] + _android_sdk_dep = default_android_sdk_dep } _resource_files = [] @@ -1046,7 +1143,7 @@ ]) r_text = _r_text_out_path - possible_config_deps = _deps + possible_config_deps = _deps + [ _android_sdk_dep ] # Always merge manifests from resources.
# * Might want to change this at some point for consistency and clarity, @@ -1059,10 +1156,30 @@ if (enable_java_templates) { prepare_resources(target_name) { forward_variables_from(invoker, [ + "allow_missing_resources", + "public_deps", "strip_drawables", "visibility", ]) - deps = _deps + _lib_deps = filter_exclude(filter_include(_deps, java_library_patterns), + java_resource_patterns) + if (defined(public_deps)) { + # Since java library targets depend directly on sub-targets rather than + # top-level targets, public_deps are not properly propagated, at least + # in terms of the "did you depend on the target that generates your + # inputs" GN check. + assert(filter_include(public_deps, java_target_patterns) == [], + "Java targets should use deps, not public_deps. " + + "target=${target_name}, public_deps=${public_deps}") + } + + # Depend on non-library deps and on __assetres subtargets of library deps. + deps = filter_exclude(_deps, _lib_deps) + [ _android_sdk_dep ] + foreach(_lib_dep, _lib_deps) { + # Expand //foo/java -> //foo/java:java + _lib_dep = get_label_info(_lib_dep, "label_no_toolchain") + deps += [ "${_lib_dep}__assetres" ] + } res_sources_path = _res_sources_path sources = _resource_files @@ -1118,9 +1235,17 @@ if (enable_java_templates) { template("android_assets") { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) - _build_config = "$target_gen_dir/$target_name.build_config" + _build_config = "$target_gen_dir/$target_name.build_config.json" _build_config_target_name = "$target_name$build_config_target_suffix" + _sources = [] + if (defined(invoker.sources)) { + _sources = invoker.sources + } + _renaming_sources = [] + if (defined(invoker.renaming_sources)) { + _renaming_sources = invoker.renaming_sources + } write_build_config(_build_config_target_name) { type = "android_assets" build_config = _build_config @@ -1135,13 +1260,13 @@ if (enable_java_templates) { possible_config_deps = invoker.deps } - if (defined(invoker.sources)) { - asset_sources = invoker.sources + if (_sources != []) { + asset_sources = _sources } - if (defined(invoker.renaming_sources)) { + if (_renaming_sources != []) { assert(defined(invoker.renaming_destinations)) _source_count = 0 - foreach(_, invoker.renaming_sources) { + foreach(_, _renaming_sources) { _source_count += 1 } _dest_count = 0 @@ -1151,14 +1276,36 @@ if (enable_java_templates) { assert( _source_count == _dest_count, "android_assets() renaming_sources.length != renaming_destinations.length") - asset_renaming_sources = invoker.renaming_sources + asset_renaming_sources = _renaming_sources asset_renaming_destinations = invoker.renaming_destinations } } - group(target_name) { - forward_variables_from(invoker, [ "deps" ]) - public_deps = [ ":$_build_config_target_name" ] + # Use an action in order to mark sources as "inputs" to a GN target so that + # GN will fail if the appropriate deps do not exist, and so that "gn refs" + # will know about the sources. We do not add these inputs & deps to the + # __build_config target because we want building .build_config.json files + # to be fast (and because write_build_config.py does not need the files to + # exist). 
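+  # (Illustrative usage, hypothetical names:
+  #
+  #   android_assets("foo_assets") {
+  #     sources = [ "assets/data.json" ]
+  #     deps = [ ":generate_data_json" ]  # Needed if data.json is generated.
+  #   }
+  #
+  # The validation action below then fails if assets/data.json is neither
+  # checked in nor produced by a dep.)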
+ _all_sources = _sources + _renaming_sources + if (_all_sources != []) { + action(target_name) { + forward_variables_from(invoker, [ "deps" ]) + public_deps = [ ":$_build_config_target_name" ] + + script = "//build/android/gyp/validate_inputs.py" + inputs = _all_sources + outputs = [ "$target_gen_dir/$target_name.stamp" ] + args = [ + "--stamp", + rebase_path(outputs[0], root_build_dir), + ] + rebase_path(_all_sources, root_build_dir) + } + } else { + group(target_name) { + forward_variables_from(invoker, [ "deps" ]) + public_deps = [ ":$_build_config_target_name" ] + } } } @@ -1171,32 +1318,56 @@ if (enable_java_templates) { # } # } template("java_group") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) _build_config_vars = [ "input_jars_paths", + "preferred_dep", "mergeable_android_manifests", "proguard_configs", + "requires_android", ] - forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + _invoker_deps = [] + if (defined(invoker.deps)) { + _invoker_deps += invoker.deps + } + if (defined(invoker.public_deps)) { + _invoker_deps += invoker.public_deps + } write_build_config("$target_name$build_config_target_suffix") { forward_variables_from(invoker, _build_config_vars) type = "group" - build_config = "$target_gen_dir/${invoker.target_name}.build_config" + build_config = "$target_gen_dir/${invoker.target_name}.build_config.json" supports_android = true - if (defined(invoker.deps)) { - possible_config_deps = invoker.deps - } + possible_config_deps = _invoker_deps + } + + _assetres_deps = filter_include(_invoker_deps, java_resource_patterns) + _invoker_deps_minus_assetres = filter_exclude(_invoker_deps, _assetres_deps) + _lib_deps = + filter_include(_invoker_deps_minus_assetres, java_library_patterns) + + _expanded_lib_deps = [] + foreach(_lib_dep, _lib_deps) { + _expanded_lib_deps += [ get_label_info(_lib_dep, "label_no_toolchain") ] } foreach(_group_name, [ - "header", - "impl", "assetres", + "header", + "host", + "validate", ]) { - java_lib_group("${target_name}__${_group_name}") { - forward_variables_from(invoker, [ "deps" ]) - group_name = _group_name + group("${target_name}__$_group_name") { + deps = [] + foreach(_lib_dep, _expanded_lib_deps) { + deps += [ "${_lib_dep}__${_group_name}" ] + } + if (_group_name == "assetres") { + deps += _assetres_deps + } } } + group(target_name) { forward_variables_from(invoker, "*", @@ -1237,10 +1408,6 @@ if (enable_java_templates) { forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) type = "java_binary" - if (!defined(data_deps)) { - data_deps = [] - } - data_deps += [ "//third_party/jdk:java_data" ] } } @@ -1276,90 +1443,165 @@ if (enable_java_templates) { } } - # Declare a Junit executable target + # Declare a Robolectric host side test binary. # - # This target creates an executable from java code for running as a junit test - # suite. The executable will be in the output folder's /bin/ directory. + # This target creates an executable from java code for running as a + # Robolectric test suite. The executable will be in the output folder's /bin/ + # directory. # # Supports all variables of java_binary(). 
# # Example - # junit_binary("foo") { + # robolectric_binary("foo") { # sources = [ "org/chromium/foo/FooTest.java" ] # deps = [ ":bar_java" ] # } - template("junit_binary") { + template("robolectric_binary") { testonly = true - _java_binary_target_name = "${target_name}__java_binary" - _test_runner_target_name = "${target_name}__test_runner_script" _main_class = "org.chromium.testing.local.JunitTestMain" - - _build_config = "$target_gen_dir/$target_name.build_config" + _build_config = "$target_gen_dir/$target_name.build_config.json" _build_config_target_name = "$target_name$build_config_target_suffix" - _deps = [ + _java_binary_target_name = "${target_name}__java_binary" + + _invoker_deps = [ "//testing/android/junit:junit_test_support", "//third_party/android_deps:robolectric_all_java", "//third_party/junit", "//third_party/mockito:mockito_java", ] if (defined(invoker.deps)) { - _deps += invoker.deps + _invoker_deps += invoker.deps } + _non_java_deps = filter_exclude(_invoker_deps, java_target_patterns) + _java_assetres_deps = [ ":${_java_binary_target_name}__assetres" ] + if (defined(invoker.alternative_android_sdk_dep)) { _android_sdk_dep = invoker.alternative_android_sdk_dep } else { - _android_sdk_dep = "//third_party/android_sdk:android_sdk_java" + _android_sdk_dep = default_android_sdk_dep } - # a package name or a manifest is required to have resources. This is + # A package name or a manifest is required to have resources. This is # added so that junit tests that do not care about the package name can # still use resources without having to explicitly set one. if (defined(invoker.package_name)) { _package_name = invoker.package_name } else if (!defined(invoker.android_manifest)) { - _package_name = "org.chromium.test" + _package_name = "no.manifest.configured" } - _resource_arsc_output = "${target_gen_dir}/${target_name}.ap_" - _compile_resources_target = "${target_name}__compile_resources" - compile_resources(_compile_resources_target) { - forward_variables_from(invoker, [ "android_manifest" ]) - deps = _deps + _merge_manifest_target_name = "${target_name}__merge_manifests" + _android_manifest = + "$target_gen_dir/$target_name.AndroidManifest.merged.xml" + + merge_manifests(_merge_manifest_target_name) { + if (defined(invoker.android_manifest)) { + input_manifest = invoker.android_manifest + } else { + input_manifest = "//build/android/AndroidManifest.xml" + } + + if (defined(_package_name)) { + manifest_package = _package_name + } + output_manifest = _android_manifest + build_config = _build_config + min_sdk_version = default_min_sdk_version + target_sdk_version = android_sdk_version + deps = _non_java_deps + _java_assetres_deps + + [ ":$_build_config_target_name" ] + if (defined(invoker.android_manifest_dep)) { + deps += [ invoker.android_manifest_dep ] + } + } + + _resource_arsc_output = "${target_out_dir}/${target_name}.ap_" + _compile_resources_target_name = "${target_name}__compile_resources" + compile_resources(_compile_resources_target_name) { + deps = _non_java_deps + _java_assetres_deps + + [ ":$_merge_manifest_target_name" ] android_sdk_dep = _android_sdk_dep build_config_dep = ":$_build_config_target_name" build_config = _build_config if (defined(_package_name)) { rename_manifest_package = _package_name } - if (!defined(android_manifest)) { - android_manifest = "//build/android/AndroidManifest.xml" - } + android_manifest = _android_manifest arsc_output = _resource_arsc_output min_sdk_version = default_min_sdk_version target_sdk_version = android_sdk_version } - 
_jni_srcjar_target = "${target_name}__final_jni" - _outer_target_name = target_name - generate_jni_registration(_jni_srcjar_target) { - enable_native_mocks = true - require_native_mocks = true - targets = [ ":$_outer_target_name" ] + # apkbuilder step needed only to add android assets to the .ap_ file. + _apkbuilder_output = "${target_out_dir}/${target_name}.robo.ap_" + _apkbuilder_target_name = "${target_name}__apkbuilder" + package_apk("$_apkbuilder_target_name") { + build_config = _build_config + min_sdk_version = default_min_sdk_version + deps = _java_assetres_deps + [ + ":$_build_config_target_name", + ":$_compile_resources_target_name", + ] + + is_robolectric_apk = true + packaged_resources_path = _resource_arsc_output + output_apk_path = _apkbuilder_output + } + + # Some may want to disable this to remove dependency on //base + # (JNI generator is in //base). + _generate_final_jni = + !defined(invoker.generate_final_jni) || invoker.generate_final_jni + if (_generate_final_jni) { + _jni_srcjar_target_name = "${target_name}__final_jni" + _outer_target_name = target_name + generate_jni_registration(_jni_srcjar_target_name) { + enable_native_mocks = true + require_native_mocks = !defined(invoker.shared_libraries) + targets = [ ":$_outer_target_name" ] + } + + if (defined(invoker.shared_libraries)) { + foreach(_dep, invoker.shared_libraries) { + assert( + string_replace(_dep, robolectric_toolchain, "") != _dep, + "$target_name has shared_libraries with incorrect toolchain. " + + "Should contain (\$robolectric_toolchain) suffix: $_dep") + } + + # Write shared library output files of all dependencies to a file. Those + # will be the shared libraries packaged into the APK. + _shared_library_list_file = "$target_gen_dir/$target_name.native_libs" + generated_file("${target_name}__shared_library_list") { + deps = invoker.shared_libraries + outputs = [ _shared_library_list_file ] + data_keys = [ "shared_libraries" ] + walk_keys = [ "shared_libraries_barrier" ] + rebase = root_build_dir + } + } + _native_libraries_target_name = "${target_name}__native_libraries" + write_native_libraries_java(_native_libraries_target_name) { + enable_chromium_linker = false + use_final_fields = true + if (defined(_shared_library_list_file)) { + native_libraries_list_file = _shared_library_list_file + } + } } java_library_impl(_java_binary_target_name) { - forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY + [ "deps" ]) - type = "junit_binary" + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ + "deps", + "shared_libraries", + ]) + type = "robolectric_binary" main_target_name = invoker.target_name - # Include the android SDK jar(s) for resource processing. - include_android_sdk = true - - # Robolectric can handle deps that set !supports_android as well those - # that set requires_android. - bypass_platform_checks = true - deps = _deps + deps = _invoker_deps testonly = true main_class = _main_class wrapper_script_name = "helper/$main_target_name" @@ -1369,37 +1611,51 @@ if (enable_java_templates) { # 66%, which makes sharding more effective. tiered_stop_at_level_one = true + is_robolectric = true + include_android_sdk = true + alternative_android_sdk_dep = + "//third_party/robolectric:robolectric_test_sdk_java" + if (!defined(srcjar_deps)) { srcjar_deps = [] } srcjar_deps += [ - ":$_compile_resources_target", - ":$_jni_srcjar_target", - - # This dep is required for any targets that depend on //base:base_java. 
-        "//build/android:build_config_gen",
+        ":$_compile_resources_target_name",
+        "//build/android:build_config_for_testing_gen",
       ]
+      if (_generate_final_jni) {
+        srcjar_deps += [
+          ":$_jni_srcjar_target_name",
+          ":$_native_libraries_target_name",
+        ]
+      }
     }
 
-    test_runner_script(_test_runner_target_name) {
-      test_name = invoker.target_name
-      test_suite = invoker.target_name
-      test_type = "junit"
-      ignore_all_data_deps = true
-      resource_apk = _resource_arsc_output
-    }
-
-    group(target_name) {
+    test_runner_script(target_name) {
       forward_variables_from(invoker,
                              [
                                "assert_no_deps",
                                "visibility",
                              ])
-      public_deps = [
+      test_name = invoker.target_name
+      test_suite = invoker.target_name
+      test_type = "junit"
+      ignore_all_data_deps = true
+      resource_apk = _apkbuilder_output
+      deps = [
+        ":$_apkbuilder_target_name",
         ":$_build_config_target_name",
-        ":$_java_binary_target_name",
-        ":$_test_runner_target_name",
+        ":${_java_binary_target_name}__host",
+        ":${_java_binary_target_name}__java_binary_script",
+        ":${_java_binary_target_name}__validate",
+        "//third_party/robolectric:robolectric_runtime_jars",
       ]
+      if (defined(invoker.shared_libraries)) {
+        data_deps = invoker.shared_libraries
+      }
+
+      # Add non-library deps, since the __host target does not depend on them.
+      deps += filter_exclude(_invoker_deps, java_library_patterns)
     }
   }
 
@@ -1513,8 +1769,6 @@ if (enable_java_templates) {
   #
   # Variables:
   #   output: Path to the output jar.
-  #   override_build_config: Use a pre-existing .build_config. Must be of type
-  #     "apk".
   #   use_interface_jars: Use all dependent interface .jars rather than
   #     implementation .jars.
   #   use_unprocessed_jars: Use unprocessed / undesugared .jars.
@@ -1531,8 +1785,6 @@ if (enable_java_templates) {
     # TODO(crbug.com/1042017): Remove.
     not_needed(invoker, [ "no_build_hooks" ])
     forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
-    _supports_android =
-        !defined(invoker.supports_android) || invoker.supports_android
     _use_interface_jars =
         defined(invoker.use_interface_jars) && invoker.use_interface_jars
     _use_unprocessed_jars =
@@ -1544,31 +1796,23 @@ if (enable_java_templates) {
 
     _jar_target_name = target_name
 
-    _deps = []
-    if (defined(invoker.deps)) {
-      _deps = invoker.deps
-    }
-    if (_supports_android) {
-      _deps += [ "//third_party/android_sdk:android_sdk_java" ]
-    }
-
-    if (defined(invoker.override_build_config)) {
-      _build_config = invoker.override_build_config
+    if (defined(invoker.build_config)) {
+      _build_config = invoker.build_config
+      _build_config_dep = invoker.build_config_dep
     } else {
-      _build_config = "$target_gen_dir/$target_name.build_config"
+      _build_config = "$target_gen_dir/$target_name.build_config.json"
       _build_config_target_name = "$target_name$build_config_target_suffix"
+      _build_config_dep = ":$_build_config_target_name"
 
       write_build_config(_build_config_target_name) {
        type = "dist_jar"
-        supports_android = _supports_android
+        supports_android =
+            !defined(invoker.supports_android) || invoker.supports_android
        requires_android =
            defined(invoker.requires_android) && invoker.requires_android
-        possible_config_deps = _deps
-        ignore_dependency_public_deps = _direct_deps_only
+        possible_config_deps = invoker.deps
        build_config = _build_config
      }
-
-      _deps += [ ":$_build_config_target_name" ]
    }
 
     _rebased_build_config = rebase_path(_build_config, root_build_dir)
@@ -1576,7 +1820,22 @@ if (enable_java_templates) {
       forward_variables_from(invoker, [ "data" ])
       script = "//build/android/gyp/zip.py"
       depfile = "$target_gen_dir/$target_name.d"
-      deps = _deps
+      deps = [ _build_config_dep ]
+
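+      # When use_interface_jars is set, depend only on each library's
+      # __header (interface .jar) subtarget so that no implementation .jar
+      # has to be built; non-library deps are passed through unchanged.
+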
if (_use_interface_jars) { + _lib_deps = + filter_exclude(filter_include(invoker.deps, java_library_patterns), + java_resource_patterns) + _other_deps = filter_exclude(invoker.deps, _lib_deps) + foreach(_lib_dep, _lib_deps) { + # Expand //foo/java -> //foo/java:java + _lib_dep = get_label_info(_lib_dep, "label_no_toolchain") + deps += [ "${_lib_dep}__header" ] + } + deps += _other_deps + } else { + deps += invoker.deps + } inputs = [ _build_config ] @@ -1611,6 +1870,7 @@ if (enable_java_templates) { args += [ "--input-zips=@FileArg($_rebased_build_config:deps_info:device_classpath)" ] } } + _excludes = [] if (defined(invoker.jar_excluded_patterns)) { _excludes += invoker.jar_excluded_patterns @@ -1619,6 +1879,9 @@ if (enable_java_templates) { # Turbine adds files like: META-INF/TRANSITIVE/.../Foo.class # These confuse proguard: https://crbug.com/1081443 _excludes += [ "META-INF/*" ] + } else { + # Manifest files will never be correct when merging jars. + _excludes += [ "META-INF/*.MF" ] } if (_excludes != []) { args += [ "--input-zips-excluded-globs=$_excludes" ] @@ -1633,6 +1896,8 @@ if (enable_java_templates) { # proguard_enabled: Whether to enable R8. # proguard_configs: List of proguard configs. # proguard_enable_obfuscation: Whether to enable obfuscation (default=true). + # package_name: Used in the Proguard map ID. + # version_code: Used in the Proguard map ID. # # Example # dist_dex("lib_fatjar") { @@ -1640,12 +1905,12 @@ if (enable_java_templates) { # output = "$root_build_dir/MyLibrary.jar" # } template("dist_dex") { - _deps = [ "//third_party/android_sdk:android_sdk_java" ] + _deps = [ default_android_sdk_dep ] if (defined(invoker.deps)) { _deps += invoker.deps } - _build_config = "$target_gen_dir/$target_name.build_config" + _build_config = "$target_gen_dir/$target_name.build_config.json" _build_config_target_name = "$target_name$build_config_target_suffix" write_build_config(_build_config_target_name) { @@ -1661,19 +1926,19 @@ if (enable_java_templates) { build_config = _build_config } - _deps += [ ":$_build_config_target_name" ] - dex(target_name) { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "data", "data_deps", + "package_name", "proguard_configs", "proguard_enabled", "proguard_enable_obfuscation", "min_sdk_version", + "version_code", ]) - deps = _deps + deps = [ ":$_build_config_target_name" ] + _deps build_config = _build_config enable_multidex = false output = invoker.output @@ -1683,13 +1948,10 @@ if (enable_java_templates) { # per-target dex steps are emitted here since this is using jar files # rather than dex files. ignore_desugar_missing_deps = true - - # When trying to build a stand-alone .dex, don't add in jdk_libs_dex. - supports_jdk_library_desugaring = false } else { _rebased_build_config = rebase_path(_build_config, root_build_dir) input_dex_filearg = - "@FileArg(${_rebased_build_config}:final_dex:all_dex_files)" + "@FileArg(${_rebased_build_config}:deps_info:all_dex_files)" } } } @@ -1714,9 +1976,11 @@ if (enable_java_templates) { # proguard_configs: List of proguard configs (optional). # android_manifest: Path to AndroidManifest.xml (optional). # native_libraries: list of native libraries (optional). - # direct_deps_only: Do not recurse on deps. (optional, defaults false). - # jar_excluded_patterns (optional): List of globs for paths to exclude. - # jar_included_patterns (optional): List of globs for paths to include. + # direct_deps_only: Do not recurse on deps (optional, defaults false). 
+  #   jar_excluded_patterns: List of globs for paths to exclude (optional).
+  #   jar_included_patterns: List of globs for paths to include (optional).
+  #   generate_final_jni: If defined and true, generate the final
+  #     `GEN_JNI.java` and include it in the output `.aar` (optional).
   #
   # Example
   #   dist_aar("my_aar") {
@@ -1726,15 +1990,34 @@
   template("dist_aar") {
     forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
 
+    _direct_deps_only =
+        defined(invoker.direct_deps_only) && invoker.direct_deps_only
+
     _deps = []
-    if (defined(invoker.deps)) {
-      _deps = invoker.deps
+
+    _generate_final_jni =
+        defined(invoker.generate_final_jni) && invoker.generate_final_jni
+    if (_generate_final_jni) {
+      _outer_target_name = target_name
+      _jni_srcjar_target = "${target_name}__final_jni"
+      generate_jni_registration(_jni_srcjar_target) {
+        targets = [ ":$_outer_target_name" ]
+      }
+      _jni_java_target = "${target_name}__final_jni_java"
+      java_library_impl(_jni_java_target) {
+        type = "java_library"
+        supports_android = true
+        requires_android = true
+        srcjar_deps = [ ":$_jni_srcjar_target" ]
+      }
+      _deps += [ ":$_jni_java_target" ]
     }
 
-    _direct_deps_only =
-        defined(invoker.direct_deps_only) && invoker.direct_deps_only
+    if (defined(invoker.deps)) {
+      _deps += invoker.deps
+    }
 
-    _build_config = "$target_gen_dir/$target_name.build_config"
+    _build_config = "$target_gen_dir/$target_name.build_config.json"
     _build_config_target_name = "$target_name$build_config_target_suffix"
 
     write_build_config(_build_config_target_name) {
@@ -1743,7 +2026,6 @@ if (enable_java_templates) {
       possible_config_deps = _deps
       supports_android = true
       requires_android = true
-      ignore_dependency_public_deps = _direct_deps_only
       build_config = _build_config
     }
 
@@ -1752,7 +2034,11 @@
     _rebased_build_config = rebase_path(_build_config, root_build_dir)
     action_with_pydeps(target_name) {
-      forward_variables_from(invoker, [ "data" ])
+      forward_variables_from(invoker,
+                             [
+                               "data",
+                               "assert_no_deps",
+                             ])
       depfile = "$target_gen_dir/$target_name.d"
       deps = _deps
       script = "//build/android/gyp/dist_aar.py"
@@ -1780,8 +2066,11 @@ if (enable_java_templates) {
       if (_direct_deps_only) {
         args += [ "--jars=@FileArg($_rebased_build_config:javac:classpath)" ]
       } else {
-        args += [ "--jars=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)" ]
+        args += [
+          "--jars=@FileArg($_rebased_build_config:deps_info:device_classpath)",
+        ]
       }
+
       if (defined(invoker.android_manifest)) {
         args += [
           "--android-manifest",
@@ -1820,14 +2109,8 @@ if (enable_java_templates) {
   # Supports all variables of java_library(), plus:
   #   deps: In addition to defining java deps, this can also include
   #     android_assets() and android_resources() targets.
-  #   alternative_android_sdk_ijar: if set, the given android_sdk_ijar file
-  #     replaces the default android_sdk_ijar.
-  #   alternative_android_sdk_ijar_dep: the target that generates
-  #     alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
-  #     is used.
-  #   alternative_android_sdk_jar: actual jar corresponding to
-  #     alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
-  #     is used.
+  #   alternative_android_sdk_dep: android_system_java_prebuilt target to use
+  #     in place of the default android.jar.
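+  #     (robolectric_library(), defined below, passes
+  #     "//third_party/robolectric:robolectric_test_sdk_java" for this.)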
# # Example # android_library("foo_java") { @@ -1862,8 +2145,64 @@ if (enable_java_templates) { "*/R\$*.class", "*/Manifest.class", "*/Manifest\$*.class", - "*/GEN_JNI.class", + "*/*GEN_JNI.class", + ] + } + } + + # Declare an Android robolectric library target + # + # This target creates an Android library containing java code and Android + # resources. + # + # Supports all variables of java_library(), plus: + # deps: In addition to defining java deps, this can also include + # android_assets() and android_resources() targets. + # + # Example + # robolectric_library("foo_junit") { + # sources = [ + # "android/org/chromium/foo/FooTest.java", + # "android/org/chromium/foo/FooTestUtils.java", + # "android/org/chromium/foo/FooMock.java", + # ] + # deps = [ + # "//base:base_junit_test_support" + # ] + # srcjar_deps = [ + # ":foo_generated_enum" + # ] + # jar_excluded_patterns = [ + # "*/FooService.class", "org/chromium/FooService\$*.class" + # ] + # } + template("robolectric_library") { + java_library(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + testonly = true + + is_robolectric = true + include_android_sdk = true + alternative_android_sdk_dep = + "//third_party/robolectric:robolectric_test_sdk_java" + + if (!defined(jar_excluded_patterns)) { + jar_excluded_patterns = [] + } + jar_excluded_patterns += [ + "*/R.class", + "*/R\$*.class", + "*/Manifest.class", + "*/Manifest\$*.class", + "*/*GEN_JNI.class", ] + + if (!defined(deps)) { + deps = [] + } + deps += [ "//third_party/android_deps:robolectric_all_java" ] } } @@ -1919,8 +2258,9 @@ if (enable_java_templates) { defines = [] # Set these even when !use_final_fields so that they have correct default - # values within junit_binary(), which ignores jar_excluded_patterns. - if (enable_java_asserts) { + # values within robolectric_binary(), which ignores jar_excluded_patterns. + if ((defined(invoker.assertions_implicitly_enabled) && + invoker.assertions_implicitly_enabled) || enable_java_asserts) { defines += [ "_ENABLE_ASSERTS" ] } if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) { @@ -1931,10 +2271,6 @@ if (enable_java_templates) { defines += [ "_IS_CHROME_BRANDED" ] } - if (is_chromecast && chromecast_branding == "internal") { - defines += [ "_IS_CHROMECAST_BRANDING_INTERNAL" ] - } - if (defined(invoker.bundles_supported) && invoker.bundles_supported) { defines += [ "_BUNDLES_SUPPORTED" ] } @@ -1964,6 +2300,10 @@ if (enable_java_templates) { ] } } + + if (defined(testonly) && testonly) { + defines += [ "_IS_FOR_TEST" ] + } } } @@ -1977,14 +2317,12 @@ if (enable_java_templates) { # is_bundle_module: Whether or not this target is part of a bundle build. # java_package: Java package for the generated class. 
  #   use_chromium_linker:
-  #   use_modern_linker:
  template("generate_product_config_srcjar") {
    java_cpp_template(target_name) {
      defines = []
      _use_final =
          defined(invoker.build_config) ||
-          defined(invoker.use_chromium_linker) ||
-          defined(invoker.use_modern_linker) || defined(invoker.is_bundle)
+          defined(invoker.use_chromium_linker) || defined(invoker.is_bundle)
      if (_use_final) {
        defines += [ "USE_FINAL" ]
      }
@@ -1994,12 +2332,9 @@
      _use_chromium_linker =
          defined(invoker.use_chromium_linker) && invoker.use_chromium_linker
-      _use_modern_linker =
-          defined(invoker.use_modern_linker) && invoker.use_modern_linker
      _is_bundle = defined(invoker.is_bundle_module) && invoker.is_bundle_module
      defines += [
        "USE_CHROMIUM_LINKER_VALUE=$_use_chromium_linker",
-        "USE_MODERN_LINKER_VALUE=$_use_modern_linker",
        "IS_BUNDLE_VALUE=$_is_bundle",
      ]
      if (defined(invoker.build_config)) {
@@ -2026,8 +2361,7 @@
  #   * dependencies of this .so are not automatically included
  #   * ".cr.so" is never added
  #   * they are not side-loaded when incremental_install=true.
-  #   * load_library_from_apk, use_chromium_linker,
-  #     and enable_relocation_packing do not apply
+  #   * use_chromium_linker and enable_relocation_packing do not apply
  #   Use this instead of shared_libraries when you are going to load the library
  #   conditionally, and only when shared_libraries doesn't work for you.
  #   secondary_abi_loadable_modules: This is the loadable_modules analog to
@@ -2047,10 +2381,11 @@
  #     is true when building with Chromium for non-test APKs.
  #   generate_final_jni: If defined and false, skip generating the
  #     GEN_JNI srcjar.
-  #   jni_registration_header: If specified, causes the
-  #     ${target_name}__final_jni target to additionally output a
-  #     header file to this path for use with manual JNI registration.
-  #   jni_sources_exclusions: List of source path to exclude from the
+  #   generate_native_libraries_java: If defined, this flag alone controls
+  #     whether NativeLibraries.java is generated. Otherwise, by default,
+  #     NativeLibraries.java is generated only for the base module/apk when
+  #     its `shared_libraries` is not empty.
+  #   jni_file_exclusions: List of source paths to exclude from the
  #     final_jni step.
  #   aapt_locale_allowlist: If set, all locales not in this list will be
  #     stripped from resources.arsc.
@@ -2071,12 +2406,12 @@
  #   shared_resources_allowlist_target: Optional name of a target specifying
  #     an input R.txt file that lists the resources that can be exported
  #     by the APK when shared_resources or app_as_shared_lib is defined.
-  #   uncompress_shared_libraries: True if shared libraries should be stored
-  #     uncompressed in the APK. Must be unset or true if load_library_from_apk
-  #     is set to true.
  #   uncompress_dex: Store final .dex files uncompressed in the apk.
+  #   omit_dex: If true, do not build or include classes.dex.
  #   strip_resource_names: True if resource names should be stripped from the
  #     resources.arsc file in the apk or module.
+  #   strip_unused_resources: True if unused resources should be stripped from
+  #     the apk or module.
  #   short_resource_paths: True if resource paths should be shortened in the
  #     apk or module.
  #   resources_config_paths: List of paths to the aapt2 optimize config files
@@ -2090,15 +2425,8 @@
  #     dependent resource targets which override another target set
  #     overlay_resources=true. This check is on for non-test targets and
  #     cannot be disabled.
-  #   static_library_dependent_targets: A list of scopes describing targets that
-  #     use this target as a static library. Common Java code from the targets
-  #     listed in static_library_dependent_targets will be moved into this
-  #     target. Scope members are name and is_resource_ids_provider.
  #   static_library_provider: Specifies a single target that this target will
  #     use as a static library APK.
-  #   static_library_synchronized_proguard: When proguard is enabled, the
-  #     static_library_provider target will provide the dex file(s) for this
-  #     target.
  #   min_sdk_version: The minimum Android SDK version this target supports.
  #     Optional, default $default_min_sdk_version.
  #   target_sdk_version: The target Android SDK version for this target.
@@ -2115,7 +2443,6 @@
  #     ProductConfig.java file will be generated for each package.
  #   enable_proguard_checks: Turns on -checkdiscard directives and missing
  #     symbols check in the proguard step (default=true).
-  #   disable_r8_outlining: Turn off outlining during the proguard step.
  #   annotation_processor_deps: List of java_annotation_processor targets to
  #     use when compiling the sources given to this target (optional).
  #   processor_args_javac: List of args to pass to annotation processors when
@@ -2135,10 +2462,11 @@
  #     with this file as the base.
  template("android_apk_or_module") {
    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
-    assert(defined(invoker.android_manifest))
+    _template_name = target_name
    _base_path = "$target_out_dir/$target_name/$target_name"
-    _build_config = "$target_gen_dir/$target_name.build_config"
+    _build_config = "$target_gen_dir/$target_name.build_config.json"
    _build_config_target = "$target_name$build_config_target_suffix"
+    _java_target_name = "${_template_name}__java"

    _min_sdk_version = default_min_sdk_version
    _target_sdk_version = android_sdk_version
@@ -2149,8 +2477,6 @@
      _target_sdk_version = invoker.target_sdk_version
    }

-    _template_name = target_name
-
    _is_bundle_module =
        defined(invoker.is_bundle_module) && invoker.is_bundle_module
    if (_is_bundle_module) {
@@ -2158,6 +2484,7 @@
          defined(invoker.is_base_module) && invoker.is_base_module
    }

+    _omit_dex = defined(invoker.omit_dex) && invoker.omit_dex
    _enable_multidex =
        !defined(invoker.enable_multidex) || invoker.enable_multidex

@@ -2166,17 +2493,6 @@
      _final_rtxt_path = "${_final_apk_path}.R.txt"
    }

-    _short_resource_paths =
-        defined(invoker.short_resource_paths) && invoker.short_resource_paths &&
-        enable_arsc_obfuscation
-    _strip_resource_names =
-        defined(invoker.strip_resource_names) && invoker.strip_resource_names &&
-        enable_arsc_obfuscation
-    _optimize_resources = _strip_resource_names || _short_resource_paths
-
-    if (!_is_bundle_module && _short_resource_paths) {
-      _final_pathmap_path = "${_final_apk_path}.pathmap.txt"
-    }
    _res_size_info_path = "$target_out_dir/$target_name.ap_.info"
    if (!_is_bundle_module) {
      _final_apk_path_no_ext_list =
@@ -2193,20 +2509,12 @@
    if (_is_bundle_module) {
      # Path to the intermediate proto-format resources zip file.
      _proto_resources_path = "$target_out_dir/$target_name.proto.ap_"
-      if (_optimize_resources) {
-        _optimized_proto_resources_path =
-            "$target_out_dir/$target_name.optimized.proto.ap_"
-      }
    } else {
      # resource_sizes.py needs to be able to find the unpacked resources.arsc
      # file based on apk name to compute normalized size.
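      # (E.g. an apk at out/Release/apks/Foo.apk would get its unpacked copy
      # at out/Release/arsc/apks/Foo.ap_; names here are illustrative.)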
_resource_sizes_arsc_path = "$root_out_dir/arsc/" + rebase_path(_final_apk_path_no_ext, root_build_dir) + ".ap_" - if (_optimize_resources) { - _optimized_arsc_resources_path = - "$target_out_dir/$target_name.optimized.ap_" - } } if (defined(invoker.version_code)) { @@ -2229,45 +2537,23 @@ if (enable_java_templates) { _version_name = android_override_version_name } - _deps = [] if (defined(invoker.deps)) { - _deps = invoker.deps + _invoker_deps = invoker.deps + } else { + _invoker_deps = [] } + _non_java_deps = filter_exclude(_invoker_deps, java_target_patterns) + _java_assetres_deps = [ ":${_java_target_name}__assetres" ] _srcjar_deps = [] if (defined(invoker.srcjar_deps)) { _srcjar_deps = invoker.srcjar_deps } - _android_root_manifest_deps = [] - if (defined(invoker.android_manifest_dep)) { - _android_root_manifest_deps = [ invoker.android_manifest_dep ] - } - _android_root_manifest = invoker.android_manifest - _use_chromium_linker = defined(invoker.use_chromium_linker) && invoker.use_chromium_linker - _use_modern_linker = - defined(invoker.use_modern_linker) && invoker.use_modern_linker - - _load_library_from_apk = - defined(invoker.load_library_from_apk) && invoker.load_library_from_apk - - not_needed([ - "_use_chromium_linker", - "_use_modern_linker", - ]) - assert(!_load_library_from_apk || _use_chromium_linker, - "load_library_from_apk requires use_chromium_linker") - - # Make sure that uncompress_shared_libraries is set to true if - # load_library_from_apk is true. - if (defined(invoker.uncompress_shared_libraries)) { - _uncompress_shared_libraries = invoker.uncompress_shared_libraries - assert(!_load_library_from_apk || _uncompress_shared_libraries) - } else { - _uncompress_shared_libraries = _load_library_from_apk - } + + not_needed([ "_use_chromium_linker" ]) # The dependency that makes the chromium linker, if any is needed. _native_libs_deps = [] @@ -2320,12 +2606,13 @@ if (enable_java_templates) { _rebased_build_config = rebase_path(_build_config, root_build_dir) assert(_rebased_build_config != "") # Mark as used. - _generate_buildconfig_java = !defined(invoker.apk_under_test) + _generate_buildconfig_java = !defined(invoker.apk_under_test) && !_omit_dex if (defined(invoker.generate_buildconfig_java)) { _generate_buildconfig_java = invoker.generate_buildconfig_java } - _generate_productconfig_java = defined(invoker.product_config_java_packages) + _generate_productconfig_java = + defined(invoker.product_config_java_packages) && !_omit_dex # JNI generation usually goes hand-in-hand with buildconfig generation. _generate_final_jni = _generate_buildconfig_java @@ -2340,44 +2627,16 @@ if (enable_java_templates) { _proguard_mapping_path = "$_final_apk_path.mapping" } - # TODO(http://crbug.com/901465): Move shared Java code to static libraries - # when !_proguard_enabled too. 
- _is_static_library_provider = - defined(invoker.static_library_dependent_targets) && _proguard_enabled - if (_is_static_library_provider) { - _static_library_sync_dex_path = "$_base_path.synchronized.r8dex.jar" - _resource_ids_provider_deps = [] - foreach(_target, invoker.static_library_dependent_targets) { - if (_target.is_resource_ids_provider) { - assert(_resource_ids_provider_deps == [], - "Can only have 1 resource_ids_provider_dep") - _resource_ids_provider_deps += [ _target.name ] - } - } - _resource_ids_provider_dep = _resource_ids_provider_deps[0] - } else if (defined(invoker.resource_ids_provider_dep)) { + if (defined(invoker.resource_ids_provider_dep)) { _resource_ids_provider_dep = invoker.resource_ids_provider_dep } - if (_is_static_library_provider) { - _shared_resources_allowlist_target = _resource_ids_provider_dep - } else if (defined(invoker.shared_resources_allowlist_target)) { + if (defined(invoker.shared_resources_allowlist_target)) { _shared_resources_allowlist_target = invoker.shared_resources_allowlist_target } _uses_static_library = defined(invoker.static_library_provider) - _uses_static_library_synchronized_proguard = - defined(invoker.static_library_synchronized_proguard) && - invoker.static_library_synchronized_proguard - - if (_uses_static_library_synchronized_proguard) { - assert(_uses_static_library) - - # These will be provided by the static library APK. - _generate_buildconfig_java = false - _generate_final_jni = false - } # TODO(crbug.com/864142): Allow incremental installs of bundle modules. _incremental_apk = !_is_bundle_module && @@ -2389,18 +2648,18 @@ if (enable_java_templates) { _incremental_apk_path = "${_final_apk_path_no_ext}_incremental.apk" } - if (!_incremental_apk) { + if (!_incremental_apk && !_omit_dex) { # Bundle modules don't build the dex here, but need to write this path - # to their .build_config file. + # to their .build_config.json file only when proguarding. if (_proguard_enabled) { _final_dex_path = "$_base_path.r8dex.jar" - } else { + } else if (!_is_bundle_module) { _final_dex_path = "$_base_path.mergeddex.jar" } } _android_manifest = - "$target_gen_dir/${_template_name}_manifest/AndroidManifest.xml" + "$target_gen_dir/${_template_name}/AndroidManifest.merged.xml" _merge_manifest_target = "${_template_name}__merge_manifests" merge_manifests(_merge_manifest_target) { forward_variables_from(invoker, @@ -2408,15 +2667,21 @@ if (enable_java_templates) { "manifest_package", "max_sdk_version", ]) - input_manifest = _android_root_manifest + input_manifest = invoker.android_manifest output_manifest = _android_manifest build_config = _build_config min_sdk_version = _min_sdk_version target_sdk_version = _target_sdk_version - deps = _android_root_manifest_deps + [ ":$_build_config_target" ] + + # Depend on android_resources() targets that use generated files + # in mergeable_android_manifests (such as android_aar_prebuilt). + deps = _java_assetres_deps + [ ":$_build_config_target" ] + if (defined(invoker.android_manifest_dep)) { + deps += [ invoker.android_manifest_dep ] + } } - _final_deps = [] + _final_deps = [ ":$_java_target_name" ] _enable_main_dex_list = _enable_multidex && _min_sdk_version < 21 if (_enable_main_dex_list) { @@ -2425,16 +2690,10 @@ if (enable_java_templates) { } _generated_proguard_config = "$_base_path.resources.proguard.txt" - if (_generate_buildconfig_java && - defined(invoker.product_version_resources_dep)) { - # Needs to be added as a .build_config dep to pick up resources. 
- _deps += [ invoker.product_version_resources_dep ] - } - if (defined(invoker.alternative_android_sdk_dep)) { _android_sdk_dep = invoker.alternative_android_sdk_dep } else { - _android_sdk_dep = "//third_party/android_sdk:android_sdk_java" + _android_sdk_dep = default_android_sdk_dep } if (defined(_shared_resources_allowlist_target)) { @@ -2449,9 +2708,25 @@ if (enable_java_templates) { "${_shared_resources_allowlist_target}__compile_resources" } - if (_short_resource_paths) { - _resources_path_map_out_path = - "${target_gen_dir}/${_template_name}_resources_path_map.txt" + if (_incremental_apk) { + _incremental_android_manifest = + "$target_gen_dir/${_template_name}/AndroidManifest.incremental.xml" + _incremental_manifest_target_name = "${target_name}__incremental_manifest" + action_with_pydeps(_incremental_manifest_target_name) { + deps = [ ":$_merge_manifest_target" ] + script = + "//build/android/incremental_install/generate_android_manifest.py" + inputs = [ _android_manifest ] + outputs = [ _incremental_android_manifest ] + + args = [ + "--disable-isolated-processes", + "--src-manifest", + rebase_path(_android_manifest, root_build_dir), + "--dst-manifest", + rebase_path(_incremental_android_manifest, root_build_dir), + ] + } } _compile_resources_target = "${_template_name}__compile_resources" @@ -2460,33 +2735,28 @@ if (enable_java_templates) { _compile_resources_emit_ids_out = "${target_gen_dir}/${_compile_resources_target}.resource_ids" compile_resources(_compile_resources_target) { - forward_variables_from(invoker, - [ - "aapt_locale_allowlist", - "app_as_shared_lib", - "enforce_resource_overlays_in_tests", - "expected_android_manifest", - "expected_android_manifest_base", - "extra_verification_manifest", - "extra_verification_manifest_dep", - "manifest_package", - "max_sdk_version", - "no_xml_namespaces", - "package_id", - "package_name", - "png_to_webp", - "r_java_root_package_name", - "resource_exclusion_exceptions", - "resource_exclusion_regex", - "resource_values_filter_rules", - "resources_config_paths", - "shared_resources", - "shared_resources_allowlist_locales", - "support_zh_hk", - "uses_split", - ]) - short_resource_paths = _short_resource_paths - strip_resource_names = _strip_resource_names + forward_variables_from( + invoker, + [ + "aapt_locale_allowlist", + "app_as_shared_lib", + "enforce_resource_overlays_in_tests", + "expected_android_manifest", + "expected_android_manifest_base", + "expected_android_manifest_library_version_offset", + "expected_android_manifest_version_code_offset", + "manifest_package", + "max_sdk_version", + "package_id", + "png_to_webp", + "r_java_root_package_name", + "resource_exclusion_exceptions", + "resource_exclusion_regex", + "resource_values_filter_rules", + "shared_resources", + "shared_resources_allowlist_locales", + "uses_split", + ]) android_manifest = _android_manifest android_manifest_dep = ":$_merge_manifest_target" version_code = _version_code @@ -2502,6 +2772,10 @@ if (enable_java_templates) { resource_ids_provider_dep = _resource_ids_provider_dep } + if (defined(invoker.module_name)) { + package_name = invoker.module_name + } + if (defined(invoker.post_process_package_resources_script)) { post_process_script = invoker.post_process_package_resources_script } @@ -2512,21 +2786,15 @@ if (enable_java_templates) { if (_enable_main_dex_list) { proguard_file_main_dex = _generated_proguard_main_dex_config } - if (_short_resource_paths) { - resources_path_map_out_path = _resources_path_map_out_path - } build_config = _build_config 
      build_config_dep = ":$_build_config_target"
      android_sdk_dep = _android_sdk_dep
-      deps = _deps
+      deps = _java_assetres_deps + _non_java_deps

-      # The static library uses the R.txt files generated by the
-      # static_library_dependent_targets when generating the final R.java file.
-      if (_is_static_library_provider) {
-        foreach(_dep, invoker.static_library_dependent_targets) {
-          deps += [ "${_dep.name}__compile_resources" ]
-        }
+      if (_incremental_apk) {
+        android_manifest = _incremental_android_manifest
+        android_manifest_dep = ":$_incremental_manifest_target_name"
      }

      if (defined(invoker.apk_under_test)) {
@@ -2542,27 +2810,16 @@
        assert(!defined(resource_ids_provider_dep))
        resource_ids_provider_dep = invoker.apk_under_test

-        include_resource =
-            get_label_info(invoker.apk_under_test, "target_out_dir") + "/" +
-            get_label_info(invoker.apk_under_test, "name") + ".ap_"
        _link_against = invoker.apk_under_test
      }

      if (_is_bundle_module) {
        is_bundle_module = true
        proto_output = _proto_resources_path
-        if (_optimize_resources) {
-          optimized_proto_output = _optimized_proto_resources_path
-        }

        if (defined(invoker.base_module_target)) {
-          include_resource =
-              get_label_info(invoker.base_module_target, "target_out_dir") +
-              "/" + get_label_info(invoker.base_module_target, "name") + ".ap_"
          _link_against = invoker.base_module_target
        }
-      } else if (_optimize_resources) {
-        optimized_arsc_output = _optimized_arsc_resources_path
      }

      if (defined(_link_against)) {
@@ -2585,16 +2842,63 @@
    }

    _srcjar_deps += [ ":$_compile_resources_target" ]

-    if (defined(_resource_sizes_arsc_path)) {
-      _copy_arsc_target = "${_template_name}__copy_arsc"
-      copy(_copy_arsc_target) {
-        deps = [ ":$_compile_resources_target" ]
+    # We no longer ship APKs, so resource optimization applies only to bundle
+    # builds.
+    if (_is_bundle_module) {
+      _short_resource_paths =
+          defined(invoker.short_resource_paths) &&
+          invoker.short_resource_paths && enable_arsc_obfuscation
+      _strip_resource_names =
+          defined(invoker.strip_resource_names) &&
+          invoker.strip_resource_names && enable_arsc_obfuscation
+      _strip_unused_resources =
+          defined(invoker.strip_unused_resources) &&
+          invoker.strip_unused_resources && enable_unused_resource_stripping
+      _optimize_resources = _strip_resource_names || _short_resource_paths ||
+                            _strip_unused_resources
+    }
+
+    if (_is_bundle_module && _optimize_resources) {
+      _optimized_proto_resources_path =
+          "$target_out_dir/$target_name.optimized.proto.ap_"
+      if (_short_resource_paths) {
+        _resources_path_map_out_path =
+            "${target_gen_dir}/${_template_name}_resources_path_map.txt"
+      }
+      _optimize_resources_target = "${_template_name}__optimize_resources"
+      optimize_resources(_optimize_resources_target) {
+        deps = _non_java_deps + [ ":$_compile_resources_target" ]
+        short_resource_paths = _short_resource_paths
+        strip_resource_names = _strip_resource_names
+        if (_short_resource_paths) {
+          resources_path_map_out_path = _resources_path_map_out_path
+        }
+        r_text_path = _compile_resources_rtxt_out
+        proto_input_path = _proto_resources_path
+        optimized_proto_output = _optimized_proto_resources_path
+        if (_strip_unused_resources) {
+          # These need to be kept in sync with the target names + output paths
+          # in the android_app_bundle template.
+ _unused_resources_target = "${_template_name}__unused_resources" + _unused_resources_config_path = + "$target_gen_dir/${_template_name}_unused_resources.config" + resources_config_paths = [ _unused_resources_config_path ] + deps += [ ":$_unused_resources_target" ] + } else { + resources_config_paths = [] + } + if (defined(invoker.resources_config_paths)) { + resources_config_paths += invoker.resources_config_paths + } + } - # resource_sizes.py doesn't care if it gets the optimized .arsc. - sources = [ _arsc_resources_path ] - outputs = [ _resource_sizes_arsc_path ] + if (_strip_unused_resources) { + # Copy the unused resources config to the final bundle output dir. + _copy_unused_resources_target = + "${_template_name}__copy_unused_resources" + _final_deps += [ ":$_copy_unused_resources_target" ] } - _final_deps += [ ":$_copy_arsc_target" ] + } else { + not_needed(invoker, [ "resources_config_paths" ]) } if (!_is_bundle_module) { @@ -2611,26 +2915,27 @@ if (enable_java_templates) { outputs = [ _final_rtxt_path ] } _final_deps += [ ":$_copy_rtxt_target" ] + } - if (_short_resource_paths) { - # Do the same for path map - _copy_pathmap_target = "${_template_name}__copy_pathmap" - copy(_copy_pathmap_target) { - deps = [ ":$_compile_resources_target" ] - sources = [ _resources_path_map_out_path ] - outputs = [ _final_pathmap_path ] - - # The monochrome_public_apk_checker test needs pathmap when run on swarming. - data = [ _final_pathmap_path ] - } - _final_deps += [ ":$_copy_pathmap_target" ] + if (defined(_resource_sizes_arsc_path)) { + _copy_arsc_target = "${_template_name}__copy_arsc" + copy(_copy_arsc_target) { + deps = [ ":$_compile_resources_target" ] + + # resource_sizes.py doesn't care if it gets the optimized .arsc. + sources = [ _arsc_resources_path ] + outputs = [ _resource_sizes_arsc_path ] } + _final_deps += [ ":$_copy_arsc_target" ] } - _generate_native_libraries_java = - (!_is_bundle_module || _is_base_module) && - (_native_libs_deps != [] || _secondary_abi_native_libs_deps != []) && - !_uses_static_library_synchronized_proguard + if (defined(invoker.generate_native_libraries_java)) { + _generate_native_libraries_java = invoker.generate_native_libraries_java + } else { + _generate_native_libraries_java = + (!_is_bundle_module || _is_base_module) && !_omit_dex && + !defined(invoker.apk_under_test) + } if (_generate_native_libraries_java) { write_native_libraries_java("${_template_name}__native_libraries") { forward_variables_from(invoker, [ "main_component_library" ]) @@ -2638,14 +2943,22 @@ if (enable_java_templates) { # Do not add a dep on the generated_file target in order to avoid having # to build the native libraries before this target. The dependency is # instead captured via a depfile. 
- if (_native_libs_deps != []) { + if (_uses_static_library) { + _prefix = get_label_info(invoker.static_library_provider, + "target_gen_dir") + "/" + + get_label_info(invoker.static_library_provider, "name") + if (defined(invoker.static_library_provider_use_secondary_abi) && + invoker.static_library_provider_use_secondary_abi) { + native_libraries_list_file = "${_prefix}.secondary_abi_native_libs" + } else { + native_libraries_list_file = "${_prefix}.native_libs" + } + } else if (_native_libs_deps != []) { native_libraries_list_file = _shared_library_list_file - } else { + } else if (_secondary_abi_native_libs_deps != []) { native_libraries_list_file = _secondary_abi_shared_library_list_file } enable_chromium_linker = _use_chromium_linker - load_library_from_apk = _load_library_from_apk - use_modern_linker = _use_modern_linker use_final_fields = true } _srcjar_deps += [ ":${_template_name}__native_libraries" ] @@ -2660,6 +2973,11 @@ if (enable_java_templates) { _loadable_modules += _sanitizer_runtimes } + _assertions_implicitly_enabled = defined(invoker.custom_assertion_handler) + + # Many possible paths where we wouldn't use this variable. + not_needed([ "_assertions_implicitly_enabled" ]) + if (_generate_buildconfig_java) { generate_build_config_srcjar("${_template_name}__build_config_srcjar") { forward_variables_from(invoker, @@ -2667,15 +2985,17 @@ if (enable_java_templates) { "min_sdk_version", "isolated_splits_enabled", ]) - _bundles_supported = _is_bundle_module || _is_static_library_provider + _bundles_supported = _is_bundle_module if (defined(invoker.bundles_supported)) { _bundles_supported = invoker.bundles_supported } bundles_supported = _bundles_supported use_final_fields = true + assertions_implicitly_enabled = _assertions_implicitly_enabled enable_multidex = _enable_multidex is_incremental_install = _incremental_apk - if (defined(invoker.product_version_resources_dep)) { + if (defined(invoker.build_config_include_product_version_resource) && + invoker.build_config_include_product_version_resource) { resources_version_variable = "org.chromium.base.R.string.product_version" } @@ -2693,7 +3013,6 @@ if (enable_java_templates) { build_config = _build_config java_package = _package use_chromium_linker = _use_chromium_linker - use_modern_linker = _use_modern_linker deps = [ ":$_build_config_target" ] } _srcjar_deps += [ ":$_locale_target_name" ] @@ -2704,6 +3023,7 @@ if (enable_java_templates) { generate_jni_registration("${_template_name}__final_jni") { forward_variables_from(invoker, [ + "enable_jni_multiplexing", "enable_native_mocks", "require_native_mocks", ]) @@ -2712,30 +3032,25 @@ if (enable_java_templates) { } else { targets = [ ":$_template_name" ] } - if (_is_static_library_provider) { - foreach(_target, invoker.static_library_dependent_targets) { - targets += [ _target.name ] - } - } - if (defined(invoker.jni_registration_header)) { - header_output = invoker.jni_registration_header - } - if (defined(invoker.jni_sources_exclusions)) { - sources_exclusions = invoker.jni_sources_exclusions + if (defined(invoker.jni_file_exclusions)) { + file_exclusions = invoker.jni_file_exclusions } + prevent_header_output = true } _srcjar_deps += [ ":${_template_name}__final_jni" ] } else { - not_needed(invoker, - [ - "enable_native_mocks", - "jni_registration_header", - ]) + not_needed(invoker, [ "enable_native_mocks" ]) } - _java_target = "${_template_name}__java" + if (_is_bundle_module) { + _add_view_trace_events = + defined(invoker.add_view_trace_events) && + 
invoker.add_view_trace_events && enable_trace_event_bytecode_rewriting + } - java_library_impl(_java_target) { + # We cannot skip this target when omit_dex = true because it writes the + # build_config.json. + java_library_impl(_java_target_name) { forward_variables_from(invoker, [ "alternative_android_sdk_dep", @@ -2745,41 +3060,30 @@ if (enable_java_templates) { "apk_under_test", "base_module_target", "chromium_code", + "deps", "jacoco_never_instrument", "jar_excluded_patterns", "javac_args", + "mergeable_android_manifests", "native_lib_placeholders", + "parent_module_target", "processor_args_javac", "secondary_abi_loadable_modules", "secondary_native_lib_placeholders", "sources", - "static_library_dependent_targets", "library_always_compress", - "library_renames", ]) - deps = _deps - if (_uses_static_library_synchronized_proguard) { - if (!defined(jar_excluded_patterns)) { - jar_excluded_patterns = [] - } - - # The static library will provide all R.java files, but we still need to - # make the base module R.java files available at compile time since DFM - # R.java classes extend base module classes. - jar_excluded_patterns += [ - "*/R.class", - "*/R\$*.class", - ] - } + version_code = _version_code + version_name = _version_name if (_is_bundle_module) { type = "android_app_bundle_module" res_size_info_path = _res_size_info_path - is_base_module = _is_base_module - forward_variables_from(invoker, - [ - "version_code", - "version_name", - ]) + if (defined(invoker.module_name)) { + module_name = invoker.module_name + } else { + module_name = "base" + } + add_view_trace_events = _add_view_trace_events } else { type = "android_apk" } @@ -2788,6 +3092,7 @@ if (enable_java_templates) { supports_android = true requires_android = true srcjar_deps = _srcjar_deps + merged_android_manifest = _android_manifest if (defined(_final_dex_path)) { final_dex_path = _final_dex_path } @@ -2814,10 +3119,8 @@ if (enable_java_templates) { if (defined(invoker.proguard_configs)) { proguard_configs += invoker.proguard_configs } - if (_enable_main_dex_list) { - proguard_configs += [ "//build/android/multidex.flags" ] - } - if (!enable_java_asserts && (!defined(testonly) || !testonly) && + if (!_assertions_implicitly_enabled && !enable_java_asserts && + (!defined(testonly) || !testonly) && # Injected JaCoCo code causes -checkdiscards to fail. !use_jacoco_coverage) { proguard_configs += [ "//build/android/dcheck_is_off.flags" ] @@ -2840,204 +3143,106 @@ if (enable_java_templates) { loadable_modules = _loadable_modules - uncompress_shared_libraries = _uncompress_shared_libraries - if (defined(_allowlist_r_txt_path) && _is_bundle_module) { - # Used to write the file path to the target's .build_config only. + # Used to write the file path to the target's .build_config.json only. base_allowlist_rtxt_path = _allowlist_r_txt_path } } - # TODO(cjhopman): This is only ever needed to calculate the list of tests to - # run. See build/android/pylib/instrumentation/test_jar.py. We should be - # able to just do that calculation at build time instead. 
-    if (defined(invoker.dist_ijar_path)) {
-      _dist_ijar_path = invoker.dist_ijar_path
-      dist_jar("${_template_name}_dist_ijar") {
-        override_build_config = _build_config
-        output = _dist_ijar_path
-        data = [ _dist_ijar_path ]
-        use_interface_jars = true
-        deps = [
-          ":$_build_config_target",
-          ":$_java_target",
-        ]
-      }
-    }
-
-    if (_uses_static_library_synchronized_proguard) {
-      _final_dex_target_dep = "${invoker.static_library_provider}__dexsplitter"
-    } else if (_is_bundle_module && _proguard_enabled) {
-      _final_deps += [ ":$_java_target" ]
+    if (_is_bundle_module || _omit_dex) {
+      # Dex generation for app bundle modules takes place in the
+      # android_app_bundle template.
+      not_needed(invoker, [ "custom_assertion_handler" ])
    } else if (_incremental_apk) {
-      if (defined(invoker.enable_proguard_checks)) {
-        not_needed(invoker, [ "enable_proguard_checks" ])
-      }
-      if (defined(invoker.disable_r8_outlining)) {
-        not_needed(invoker, [ "disable_r8_outlining" ])
-      }
-      if (defined(invoker.dexlayout_profile)) {
-        not_needed(invoker, [ "dexlayout_profile" ])
-      }
+      not_needed(invoker,
+                 [
+                   "enable_proguard_checks",
+                   "custom_assertion_handler",
+                 ])
    } else {
-      # Dex generation for app bundle modules with proguarding enabled takes
-      # place later due to synchronized proguarding. For more details,
-      # read build/android/docs/android_app_bundles.md
      _final_dex_target_name = "${_template_name}__final_dex"
      dex(_final_dex_target_name) {
        forward_variables_from(invoker,
                               [
-                                "disable_r8_outlining",
-                                "dexlayout_profile",
                                 "enable_proguard_checks",
+                                "custom_assertion_handler",
                                 "proguard_enable_obfuscation",
                               ])
        min_sdk_version = _min_sdk_version
        proguard_enabled = _proguard_enabled
        build_config = _build_config
+        output = _final_dex_path
+        enable_multidex = _enable_multidex
        deps = [
          ":$_build_config_target",
-          ":$_java_target",
+          ":$_java_target_name",
        ]

        if (_proguard_enabled) {
-          deps += _deps + [ ":$_compile_resources_target" ]
+          # Generates proguard configs.
+          deps += [ ":$_compile_resources_target" ]
          proguard_mapping_path = _proguard_mapping_path
-          proguard_sourcefile_suffix = "$android_channel-$_version_code"
          has_apk_under_test = defined(invoker.apk_under_test)
-        } else if (_min_sdk_version >= default_min_sdk_version) {
-          # Enable dex merging only when min_sdk_version is >= what the library
-          # .dex files were created with.
-          input_dex_filearg =
-              "@FileArg(${_rebased_build_config}:final_dex:all_dex_files)"
        } else {
-          input_classes_filearg =
-              "@FileArg($_rebased_build_config:deps_info:device_classpath)"
-        }
-
-        if (_is_static_library_provider) {
-          # The list of input jars is already recorded in the .build_config, but
-          # we need to explicitly add the java deps here to ensure they're
-          # available to be used as inputs to the dex step.
-          foreach(_dep, invoker.static_library_dependent_targets) {
-            _target_label = get_label_info(_dep.name, "label_no_toolchain")
-            deps += [ "${_target_label}__java" ]
+          if (_min_sdk_version >= default_min_sdk_version) {
+            # Enable dex merging only when min_sdk_version is >= what the library
+            # .dex files were created with.
+            input_dex_filearg =
+                "@FileArg(${_rebased_build_config}:deps_info:all_dex_files)"
+
+            # Pure dex-merge.
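+            # (Inputs are the per-library .dex files already listed in the
+            # build config, so the desugaring pass is disabled below.)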
+ enable_desugar = false + } else { + input_classes_filearg = + "@FileArg($_rebased_build_config:deps_info:device_classpath)" } - output = _static_library_sync_dex_path - is_static_library = true - } else { - output = _final_dex_path } - enable_multidex = _enable_multidex # The individual dependencies would have caught real missing deps in # their respective dex steps. False positives that were suppressed at # per-target dex steps are emitted here since this may use jar files # rather than dex files. - ignore_desugar_missing_deps = true + if (!defined(enable_desugar)) { + ignore_desugar_missing_deps = true + } if (_enable_main_dex_list) { - extra_main_dex_proguard_config = _generated_proguard_main_dex_config + # Generates main-dex config. deps += [ ":$_compile_resources_target" ] + extra_main_dex_proguard_config = _generated_proguard_main_dex_config } } _final_dex_target_dep = ":$_final_dex_target_name" - # For static libraries, a single Proguard run is performed that includes - # code from the static library APK and the APKs that use the static - # library (done via. classpath merging in write_build_config.py). - # This dexsplitter target splits the synchronized dex output into dex - # files for each APK/Bundle. In the Bundle case, another dexsplitter step - # is later performed to split the dex further for each feature module. - if (_is_static_library_provider && _proguard_enabled) { - _static_library_modules = [] - foreach(_target, invoker.static_library_dependent_targets) { - _apk_as_module = _target.name - _module_config_target = "${_apk_as_module}$build_config_target_suffix" - _module_gen_dir = get_label_info(_apk_as_module, "target_gen_dir") - _module_name = get_label_info(_apk_as_module, "name") - _module_config = "$_module_gen_dir/$_module_name.build_config" - _static_library_modules += [ - { - name = _module_name - build_config = _module_config - build_config_target = _module_config_target - }, - ] - } - - _static_library_dexsplitter_target = "${_template_name}__dexsplitter" - dexsplitter(_static_library_dexsplitter_target) { - input_dex_zip = _static_library_sync_dex_path - proguard_mapping = _proguard_mapping_path - deps = [ - ":$_build_config_target", - "$_final_dex_target_dep", - ] - all_modules = [ - { - name = "base" - build_config = _build_config - build_config_target = ":$_build_config_target" - }, - ] + _static_library_modules - feature_jars_args = [ - "--feature-jars", - "@FileArg($_rebased_build_config:deps_info:" + - "static_library_dependent_classpath_configs:" + - "$_rebased_build_config)", - ] - foreach(_module, _static_library_modules) { - _rebased_module_config = - rebase_path(_module.build_config, root_build_dir) - feature_jars_args += [ - "--feature-jars", - "@FileArg($_rebased_build_config:deps_info:" + - "static_library_dependent_classpath_configs:" + - "$_rebased_module_config)", - ] - } - } - _final_deps += [ ":$_static_library_dexsplitter_target" ] - _validate_dex_target = "${_template_name}__validate_dex" - action_with_pydeps(_validate_dex_target) { - depfile = "$target_gen_dir/$target_name.d" - script = - "//build/android/gyp/validate_static_library_dex_references.py" - inputs = [ _build_config ] - _stamp = "$target_gen_dir/$target_name.stamp" - outputs = [ _stamp ] + _use_baseline_profile = + _proguard_enabled && defined(invoker.baseline_profile_path) && + enable_baseline_profiles + if (_use_baseline_profile) { + _binary_profile_target = "${_template_name}__binary_baseline_profile" + _binary_baseline_profile_path = + 
"$target_out_dir/$_template_name.baseline.prof" + _binary_baseline_profile_metadata_path = + _binary_baseline_profile_path + "m" + create_binary_profile(_binary_profile_target) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + binary_baseline_profile_path = _binary_baseline_profile_path + binary_baseline_profile_metadata_path = + _binary_baseline_profile_metadata_path + proguard_mapping_path = _proguard_mapping_path + build_config = _build_config + input_profile_path = invoker.baseline_profile_path deps = [ ":$_build_config_target", - ":$_static_library_dexsplitter_target", - ] - args = [ - "--depfile", - rebase_path(depfile, root_build_dir), - "--stamp", - rebase_path(_stamp, root_build_dir), - "--static-library-dex", - "@FileArg($_rebased_build_config:final_dex:path)", + _final_dex_target_dep, ] - foreach(_module, _static_library_modules) { - inputs += [ _module.build_config ] - _rebased_config = rebase_path(_module.build_config, root_build_dir) - deps += [ _module.build_config_target ] - args += [ - "--static-library-dependent-dex", - "@FileArg($_rebased_config:final_dex:path)", - ] - } } - - # TODO(crbug.com/1032609): Switch to using R8's support for feature - # aware ProGuard and get rid of "_validate_dex_target" or figure out - # why some classes aren't properly being kept. - # _final_deps += [ ":$_validate_dex_target" ] - _final_dex_target_dep = ":$_static_library_dexsplitter_target" } } + if (!defined(_use_baseline_profile) || !_use_baseline_profile) { + not_needed(invoker, [ "baseline_profile_path" ]) + } + _all_native_libs_deps = _native_libs_deps + _secondary_abi_native_libs_deps if (_all_native_libs_deps != []) { _native_libs_filearg_dep = ":$_build_config_target" @@ -3051,10 +3256,13 @@ if (enable_java_templates) { if (_is_bundle_module) { _final_deps += [ - ":$_merge_manifest_target", ":$_build_config_target", ":$_compile_resources_target", + ":$_merge_manifest_target", ] + _all_native_libs_deps + if (_optimize_resources) { + _final_deps += [ ":$_optimize_resources_target" ] + } if (defined(_final_dex_target_dep)) { not_needed([ "_final_dex_target_dep" ]) } @@ -3071,11 +3279,11 @@ if (enable_java_templates) { name = "${invoker.name}.apk" build_config = _build_config res_size_info_path = _res_size_info_path - deps = _deps + [ - ":$_build_config_target", - ":$_compile_resources_target", - ":$_java_target", - ] + deps = [ + ":$_build_config_target", + ":$_compile_resources_target", + ":$_java_target_name", + ] } _final_deps += [ ":$_size_info_target" ] } else { @@ -3083,51 +3291,6 @@ if (enable_java_templates) { } } - _keystore_path = android_keystore_path - _keystore_name = android_keystore_name - _keystore_password = android_keystore_password - - if (defined(invoker.keystore_path)) { - _keystore_path = invoker.keystore_path - _keystore_name = invoker.keystore_name - _keystore_password = invoker.keystore_password - } - - if (_incremental_apk) { - _incremental_compiled_resources_path = "${_base_path}_incremental.ap_" - _incremental_compile_resources_target_name = - "${target_name}__compile_incremental_resources" - - action_with_pydeps(_incremental_compile_resources_target_name) { - deps = [ - ":$_build_config_target", - ":$_compile_resources_target", - ":$_merge_manifest_target", - ] - script = - "//build/android/incremental_install/generate_android_manifest.py" - inputs = [ - _android_manifest, - _build_config, - _arsc_resources_path, - ] - outputs = [ _incremental_compiled_resources_path ] - - args = [ - "--disable-isolated-processes", - "--src-manifest", - 
          rebase_path(_android_manifest, root_build_dir),
-          "--in-apk",
-          rebase_path(_arsc_resources_path, root_build_dir),
-          "--out-apk",
-          rebase_path(_incremental_compiled_resources_path, root_build_dir),
-          "--aapt2-path",
-          rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir),
-          "--android-sdk-jars=@FileArg($_rebased_build_config:android:sdk_jars)",
-        ]
-      }
-    }
-
    _create_apk_target = "${_template_name}__create"
    _final_deps += [ ":$_create_apk_target" ]
    package_apk("$_create_apk_target") {
@@ -3135,13 +3298,14 @@
          [
            "expected_libs_and_assets",
            "expected_libs_and_assets_base",
+            "keystore_name",
+            "keystore_path",
+            "keystore_password",
            "native_lib_placeholders",
            "secondary_abi_loadable_modules",
            "secondary_native_lib_placeholders",
            "uncompress_dex",
-            "uncompress_shared_libraries",
            "library_always_compress",
-            "library_renames",
          ])

      if (defined(expected_libs_and_assets)) {
@@ -3150,32 +3314,27 @@
      }

      build_config = _build_config
-      keystore_name = _keystore_name
-      keystore_path = _keystore_path
-      keystore_password = _keystore_password
      min_sdk_version = _min_sdk_version
-      uncompress_shared_libraries = _uncompress_shared_libraries
+      packaged_resources_path = _arsc_resources_path

-      deps = _deps + [ ":$_build_config_target" ]
-
-      if ((!_proguard_enabled || _incremental_apk) &&
-          enable_jdk_library_desugaring) {
-        _all_jdk_libs = "//build/android:all_jdk_libs"
-        deps += [ _all_jdk_libs ]
-        jdk_libs_dex = get_label_info(_all_jdk_libs, "target_out_dir") +
-                       "/all_jdk_libs.l8.dex"
-      }
+      # Need full deps rather than _non_java_deps, because loadable_modules
+      # may include .so files extracted by __unpack_aar targets.
+      deps = _invoker_deps + [ ":$_build_config_target" ]

      if (_incremental_apk) {
        _dex_target = "//build/android/incremental_install:apk_dex"

        deps += [
-          ":${_incremental_compile_resources_target_name}",
+          ":$_compile_resources_target",
          _dex_target,
        ]

        dex_path = get_label_info(_dex_target, "target_out_dir") + "/apk.dex"

+        # Incremental APKs cannot be installed via `adb install`; as such,
+        # they should be clearly named/labeled "incremental".
+        output_apk_path = _incremental_apk_path
+
        # All native libraries are side-loaded, so use a placeholder to force
        # the proper bitness for the app.
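        # (A placeholder is an empty .so entry in the APK: it contains no
        # code, but its presence makes the platform pick the matching ABI.)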
        _has_native_libs =
            defined(invoker.shared_libraries) && invoker.shared_libraries != []
        if (_has_native_libs && !defined(native_lib_placeholders)) {
          native_lib_placeholders = [ "libfix.crbug.384638.so" ]
        }
-
-        packaged_resources_path = _incremental_compiled_resources_path
-        output_apk_path = _incremental_apk_path
      } else {
        loadable_modules = _loadable_modules
        deps += _all_native_libs_deps + [
-                 ":$_merge_manifest_target",
                  ":$_compile_resources_target",
+                  ":$_merge_manifest_target",
                ]

        if (defined(_final_dex_path)) {
          dex_path = _final_dex_path
          deps += [ _final_dex_target_dep ]
+          if (_use_baseline_profile) {
+            # extra_assets is a list of ["{src_path}:{dst_path}"]
+            extra_assets = [
+              rebase_path(_binary_baseline_profile_path, root_build_dir) +
+                  ":dexopt/baseline.prof",
+              rebase_path(_binary_baseline_profile_metadata_path,
+                          root_build_dir) + ":dexopt/baseline.profm",
+            ]
+            deps += [ ":$_binary_profile_target" ]
+          }
        }

-        if (_optimize_resources) {
-          packaged_resources_path = _optimized_arsc_resources_path
-        } else {
-          packaged_resources_path = _arsc_resources_path
-        }
+        output_apk_path = _final_apk_path

        if (defined(_native_libs_filearg)) {
          native_libs_filearg = _native_libs_filearg
          secondary_abi_native_libs_filearg = "@FileArg($_rebased_build_config:native:secondary_abi_libraries)"
        }
-        output_apk_path = _final_apk_path
      }
    }
  }
@@ -3230,7 +3391,7 @@
      args = [
        "--apk-path=$_rebased_incremental_apk_path",
        "--output-path=$_rebased_incremental_install_json_path",
-        "--dex-file=@FileArg($_rebased_build_config:final_dex:all_dex_files)",
+        "--dex-file=@FileArg($_rebased_build_config:deps_info:all_dex_files)",
      ]
      if (_proguard_enabled) {
        args += [ "--show-proguard-warning" ]
@@ -3245,10 +3406,7 @@
        args += [ "--native-libs=$_rebased_loadable_modules" ]
      }
    }
-    _final_deps += [
-      ":$_java_target",
-      ":$_write_installer_json_rule_name",
-    ]
+    _final_deps += [ ":$_write_installer_json_rule_name" ]
  }

  # Generate apk operation related script.
@@ -3335,7 +3493,9 @@
      ])
      build_config = _build_config
      build_config_dep = ":$_build_config_target"
-      deps = [ ":$_java_target" ]
+
+      # This will use library subtargets under the hood.
+      deps = [ ":$_java_target_name" ]
      if (defined(invoker.lint_suppressions_dep)) {
        deps += [ invoker.lint_suppressions_dep ]
      }
@@ -3347,6 +3507,7 @@
      not_needed(invoker,
                 [
                   "lint_baseline_file",
+                  "lint_jar_path",
                   "lint_min_sdk_version",
                   "lint_suppressions_dep",
                   "lint_suppressions_file",
@@ -3370,8 +3531,7 @@
    }

    # Include unstripped native libraries so tests can symbolize stacks.
-    data_deps += _all_native_libs_deps
-
+    data_deps += _all_native_libs_deps + [ ":${_java_target_name}__validate" ]
    if (_enable_lint) {
      data_deps += [ ":${target_name}__lint" ]
    }
@@ -3413,102 +3573,97 @@
    # TODO(crbug.com/1042017): Remove.
not_needed(invoker, [ "no_build_hooks" ]) android_apk_or_module(target_name) { - forward_variables_from(invoker, - [ - "aapt_locale_allowlist", - "additional_jar_files", - "alternative_android_sdk_dep", - "android_manifest", - "android_manifest_dep", - "annotation_processor_deps", - "apk_under_test", - "app_as_shared_lib", - "assert_no_deps", - "bundles_supported", - "chromium_code", - "command_line_flags_file", - "create_apk_script", - "data", - "data_deps", - "deps", - "dexlayout_profile", - "disable_r8_outlining", - "dist_ijar_path", - "enable_lint", - "enable_multidex", - "enable_native_mocks", - "enable_proguard_checks", - "enforce_resource_overlays_in_tests", - "expected_android_manifest", - "expected_android_manifest_base", - "expected_libs_and_assets", - "expected_libs_and_assets_base", - "generate_buildconfig_java", - "generate_final_jni", - "include_size_info", - "input_jars_paths", - "use_modern_linker", - "jacoco_never_instrument", - "javac_args", - "jni_registration_header", - "jni_sources_exclusions", - "keystore_name", - "keystore_password", - "keystore_path", - "lint_baseline_file", - "lint_min_sdk_version", - "lint_suppressions_dep", - "lint_suppressions_file", - "load_library_from_apk", - "loadable_modules", - "manifest_package", - "max_sdk_version", - "product_config_java_packages", - "main_component_library", - "min_sdk_version", - "native_lib_placeholders", - "never_incremental", - "no_xml_namespaces", - "png_to_webp", - "post_process_package_resources_script", - "processor_args_javac", - "product_version_resources_dep", - "proguard_configs", - "proguard_enabled", - "proguard_enable_obfuscation", - "r_java_root_package_name", - "resource_exclusion_exceptions", - "resource_exclusion_regex", - "resource_ids_provider_dep", - "resource_values_filter_rules", - "resources_config_paths", - "require_native_mocks", - "secondary_abi_loadable_modules", - "secondary_abi_shared_libraries", - "secondary_native_lib_placeholders", - "shared_libraries", - "shared_resources", - "shared_resources_allowlist_locales", - "shared_resources_allowlist_target", - "short_resource_paths", - "sources", - "srcjar_deps", - "static_library_dependent_targets", - "static_library_provider", - "static_library_synchronized_proguard", - "strip_resource_names", - "support_zh_hk", - "target_sdk_version", - "testonly", - "uncompress_dex", - "uncompress_shared_libraries", - "library_always_compress", - "library_renames", - "use_chromium_linker", - "version_code", - "version_name", - "visibility", - ]) + forward_variables_from( + invoker, + [ + "aapt_locale_allowlist", + "additional_jar_files", + "alternative_android_sdk_dep", + "android_manifest", + "android_manifest_dep", + "annotation_processor_deps", + "apk_under_test", + "app_as_shared_lib", + "assert_no_deps", + "baseline_profile_path", + "build_config_include_product_version_resource", + "bundles_supported", + "chromium_code", + "command_line_flags_file", + "create_apk_script", + "custom_assertion_handler", + "data", + "data_deps", + "deps", + "enable_lint", + "enable_jni_multiplexing", + "enable_multidex", + "enable_native_mocks", + "enable_proguard_checks", + "enforce_resource_overlays_in_tests", + "expected_android_manifest", + "expected_android_manifest_base", + "expected_android_manifest_library_version_offset", + "expected_android_manifest_version_code_offset", + "expected_libs_and_assets", + "expected_libs_and_assets_base", + "generate_buildconfig_java", + "generate_final_jni", + "generate_native_libraries_java", + "include_size_info", + 
"input_jars_paths", + "jacoco_never_instrument", + "javac_args", + "jni_file_exclusions", + "keystore_name", + "keystore_password", + "keystore_path", + "lint_baseline_file", + "lint_min_sdk_version", + "lint_suppressions_dep", + "lint_suppressions_file", + "loadable_modules", + "manifest_package", + "max_sdk_version", + "mergeable_android_manifests", + "product_config_java_packages", + "main_component_library", + "min_sdk_version", + "native_lib_placeholders", + "never_incremental", + "omit_dex", + "png_to_webp", + "post_process_package_resources_script", + "processor_args_javac", + "proguard_configs", + "proguard_enabled", + "proguard_enable_obfuscation", + "r_java_root_package_name", + "resource_exclusion_exceptions", + "resource_exclusion_regex", + "resource_ids_provider_dep", + "resource_values_filter_rules", + "require_native_mocks", + "secondary_abi_loadable_modules", + "secondary_abi_shared_libraries", + "secondary_native_lib_placeholders", + "shared_libraries", + "shared_resources", + "shared_resources_allowlist_locales", + "shared_resources_allowlist_target", + "sources", + "srcjar_deps", + "static_library_provider", + "static_library_provider_use_secondary_abi", + "target_sdk_version", + "testonly", + "uncompress_dex", + "library_always_compress", + "use_chromium_linker", + "version_code", + "version_name", + "visibility", + ]) is_bundle_module = false name = invoker.apk_name if (defined(invoker.final_apk_path)) { @@ -3551,6 +3706,15 @@ if (enable_java_templates) { assert(!defined(invoker.bundle_target)) } + # android_app_bundle's write_build_config expects module targets to be named + # according to java_target_patterns otherwise it ignores them when listed in + # possible_config_deps. See https://crbug.com/1418398. + if (filter_exclude([ target_name ], [ "*_bundle_module" ]) != []) { + assert(false, + "Invalid android_app_bundle_module target name ($target_name), " + + "must end in _bundle_module.") + } + # TODO(tiborg): We have several flags that are necessary for workarounds # that come from the fact that the resources get compiled in the bundle # module target, but bundle modules have to have certain flags in @@ -3560,85 +3724,91 @@ if (enable_java_templates) { # target. Doing so would keep the bundle modules independent from the bundle # and potentially reuse the same bundle modules for multiple bundles. 
android_apk_or_module(target_name) { - forward_variables_from(invoker, - [ - "aapt_locale_allowlist", - "additional_jar_files", - "alternative_android_sdk_dep", - "android_manifest", - "android_manifest_dep", - "annotation_processor_deps", - "app_as_shared_lib", - "assert_no_deps", - "base_module_target", - "bundle_target", - "chromium_code", - "data", - "data_deps", - "deps", - "enable_multidex", - "expected_android_manifest", - "expected_android_manifest_base", - "extra_verification_manifest", - "extra_verification_manifest_dep", - "generate_buildconfig_java", - "generate_final_jni", - "input_jars_paths", - "isolated_splits_enabled", - "is_base_module", - "jacoco_never_instrument", - "jar_excluded_patterns", - "javac_args", - "jni_registration_header", - "jni_sources_exclusions", - "load_library_from_apk", - "loadable_modules", - "product_config_java_packages", - "manifest_package", - "max_sdk_version", - "min_sdk_version", - "native_lib_placeholders", - "no_xml_namespaces", - "package_id", - "package_name", - "png_to_webp", - "processor_args_javac", - "product_version_resources_dep", - "proguard_configs", - "proguard_enabled", - "proguard_enable_obfuscation", - "resource_exclusion_exceptions", - "resource_exclusion_regex", - "resource_ids_provider_dep", - "resource_values_filter_rules", - "resources_config_paths", - "secondary_abi_loadable_modules", - "secondary_abi_shared_libraries", - "secondary_native_lib_placeholders", - "shared_libraries", - "shared_resources", - "shared_resources_allowlist_locales", - "shared_resources_allowlist_target", - "short_resource_paths", - "srcjar_deps", - "static_library_provider", - "static_library_synchronized_proguard", - "strip_resource_names", - "support_zh_hk", - "target_sdk_version", - "testonly", - "uncompress_shared_libraries", - "library_always_compress", - "library_renames", - "use_chromium_linker", - "use_modern_linker", - "uses_split", - "version_code", - "version_name", - "visibility", - ]) + forward_variables_from( + invoker, + [ + "add_view_trace_events", + "aapt_locale_allowlist", + "additional_jar_files", + "alternative_android_sdk_dep", + "android_manifest", + "android_manifest_dep", + "annotation_processor_deps", + "app_as_shared_lib", + "assert_no_deps", + "base_module_target", + "build_config_include_product_version_resource", + "bundle_target", + "chromium_code", + "custom_assertion_handler", + "data", + "data_deps", + "deps", + "enable_jni_multiplexing", + "enable_multidex", + "expected_android_manifest", + "expected_android_manifest_base", + "expected_android_manifest_library_version_offset", + "expected_android_manifest_version_code_offset", + "generate_buildconfig_java", + "generate_final_jni", + "generate_native_libraries_java", + "input_jars_paths", + "isolated_splits_enabled", + "is_base_module", + "jacoco_never_instrument", + "jar_excluded_patterns", + "javac_args", + "jni_file_exclusions", + "loadable_modules", + "product_config_java_packages", + "main_component_library", + "manifest_package", + "max_sdk_version", + "min_sdk_version", + "mergeable_android_manifests", + "module_name", + "native_lib_placeholders", + "package_id", + "parent_module_target", + "png_to_webp", + "processor_args_javac", + "proguard_configs", + "proguard_enabled", + "proguard_enable_obfuscation", + "resource_exclusion_exceptions", + "resource_exclusion_regex", + "resource_ids_provider_dep", + "resource_values_filter_rules", + "resources_config_paths", + "secondary_abi_loadable_modules", + "secondary_abi_shared_libraries", + 
"secondary_native_lib_placeholders", + "shared_libraries", + "shared_resources", + "shared_resources_allowlist_locales", + "shared_resources_allowlist_target", + "short_resource_paths", + "srcjar_deps", + "static_library_provider", + "static_library_provider_use_secondary_abi", + "strip_resource_names", + "strip_unused_resources", + "target_sdk_version", + "testonly", + "library_always_compress", + "use_chromium_linker", + "uses_split", + "version_code", + "version_name", + "visibility", + ]) is_bundle_module = true generate_buildconfig_java = _is_base_module + if (defined(uses_split)) { + assert(defined(parent_module_target), + "Must set parent_module_target when uses_split is set") + } } } @@ -3648,7 +3818,6 @@ if (enable_java_templates) { # # Arguments: # android_test_apk: The target containing the tests. - # android_test_apk_name: The apk_name in android_test_apk # # The following args are optional: # apk_under_test: The target being tested. @@ -3668,7 +3837,6 @@ if (enable_java_templates) { # Example # instrumentation_test_runner("foo_test_for_bar") { # android_test_apk: ":foo" - # android_test_apk_name: "Foo" # apk_under_test: ":bar" # } template("instrumentation_test_runner") { @@ -3676,8 +3844,97 @@ if (enable_java_templates) { action("${invoker.target_name}__rts_filters") { script = "//build/add_rts_filters.py" rts_file = "${root_build_dir}/gen/rts/${invoker.target_name}.filter" - args = [ rebase_path(rts_file, root_build_dir) ] - outputs = [ rts_file ] + inverted_rts_file = + "${root_build_dir}/gen/rts/${invoker.target_name}_inverted.filter" + args = [ + rebase_path(rts_file, root_build_dir), + rebase_path(inverted_rts_file, root_build_dir), + ] + outputs = [ + rts_file, + inverted_rts_file, + ] + } + } + _incremental_apk = !(defined(invoker.never_incremental) && + invoker.never_incremental) && incremental_install + _apk_operations_target_name = "${target_name}__apk_operations" + _apk_target = invoker.android_test_apk + if (defined(invoker.apk_under_test) && !_incremental_apk) { + # The actual target is defined in the test_runner_script template. 
+ _install_artifacts_json = + "${target_gen_dir}/${target_name}.install_artifacts" + _install_artifacts_target_name = "${target_name}__install_artifacts" + } + + action_with_pydeps(_apk_operations_target_name) { + testonly = true + script = "//build/android/gyp/create_test_apk_wrapper_script.py" + deps = [] + _generated_script = "$root_build_dir/bin/${invoker.target_name}" + outputs = [ _generated_script ] + _apk_build_config = + get_label_info(_apk_target, "target_gen_dir") + "/" + + get_label_info(_apk_target, "name") + ".build_config.json" + _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir) + args = [ + "--script-output-path", + rebase_path(_generated_script, root_build_dir), + "--package-name", + "@FileArg($_rebased_apk_build_config:deps_info:package_name)", + ] + deps += [ "${_apk_target}$build_config_target_suffix" ] + if (_incremental_apk) { + args += [ + "--test-apk-incremental-install-json", + "@FileArg($_rebased_apk_build_config:deps_info:incremental_install_json_path)", + ] + } else { + args += [ + "--test-apk", + "@FileArg($_rebased_apk_build_config:deps_info:apk_path)", + ] + } + if (defined(invoker.proguard_mapping_path) && !_incremental_apk) { + args += [ + "--proguard-mapping-path", + rebase_path(invoker.proguard_mapping_path, root_build_dir), + ] + } + if (defined(invoker.apk_under_test)) { + if (_incremental_apk) { + deps += [ "${invoker.apk_under_test}$build_config_target_suffix" ] + _apk_under_test_build_config = + get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" + + get_label_info(invoker.apk_under_test, "name") + + ".build_config.json" + _rebased_apk_under_test_build_config = + rebase_path(_apk_under_test_build_config, root_build_dir) + _apk_under_test = "@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_apk_path)" + } else { + deps += [ ":${_install_artifacts_target_name}" ] + _rebased_install_artifacts_json = + rebase_path(_install_artifacts_json, root_build_dir) + _apk_under_test = "@FileArg($_rebased_install_artifacts_json[])" + } + args += [ + "--additional-apk", + _apk_under_test, + ] + } + if (defined(invoker.additional_apks)) { + foreach(additional_apk, invoker.additional_apks) { + deps += [ "$additional_apk$build_config_target_suffix" ] + _build_config = + get_label_info(additional_apk, "target_gen_dir") + "/" + + get_label_info(additional_apk, "name") + ".build_config.json" + _rebased_build_config = rebase_path(_build_config, root_build_dir) + args += [ + "--additional-apk", + "@FileArg($_rebased_build_config:deps_info:apk_path)", + ] + } + deps += invoker.additional_apks } } test_runner_script(target_name) { @@ -3692,25 +3949,19 @@ if (enable_java_templates) { "extra_args", "fake_modules", "ignore_all_data_deps", + "is_unit_test", "modules", - "proguard_enabled", - "public_deps", + "proguard_mapping_path", "use_webview_provider", ]) test_name = invoker.target_name test_type = "instrumentation" - _apk_target_name = get_label_info(invoker.android_test_apk, "name") - apk_target = ":$_apk_target_name" - test_jar = "$root_build_dir/test.lib.java/" + - invoker.android_test_apk_name + ".jar" - incremental_apk = !(defined(invoker.never_incremental) && - invoker.never_incremental) && incremental_install + apk_target = invoker.android_test_apk + incremental_apk = _incremental_apk public_deps = [ - ":$_apk_target_name", - - # Required by test runner to enumerate test list. 
- ":${_apk_target_name}_dist_ijar", + ":$_apk_operations_target_name", + apk_target, ] if (defined(invoker.apk_under_test)) { public_deps += [ invoker.apk_under_test ] @@ -3815,7 +4066,6 @@ if (enable_java_templates) { data += [ "$_final_apk_path.mapping" ] } - dist_ijar_path = "$root_build_dir/test.lib.java/${invoker.apk_name}.jar" create_apk_script = false forward_variables_from(invoker, @@ -3824,6 +4074,8 @@ if (enable_java_templates) { "data", "data_deps", "deps", + "extra_args", + "is_unit_test", "proguard_configs", ]) } @@ -3852,15 +4104,17 @@ if (enable_java_templates) { "deps", "extra_args", "ignore_all_data_deps", + "is_unit_test", "modules", "never_incremental", - "proguard_enabled", - "proguard_enable_obfuscation", "public_deps", "use_webview_provider", ]) android_test_apk = ":${_apk_target_name}" - android_test_apk_name = invoker.apk_name + if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) { + proguard_mapping_path = + "$root_build_dir/apks/${invoker.apk_name}.apk.mapping" + } } } @@ -3874,9 +4128,11 @@ if (enable_java_templates) { # resource dependencies of the apk. # shared_library: shared_library target that contains the unit tests. # apk_name: The name of the produced apk. If unspecified, it uses the name - # of the shared_library target suffixed with "_apk" + # of the shared_library target suffixed with "_apk". # use_default_launcher: Whether the default activity (NativeUnitTestActivity) # should be used for launching tests. + # allow_cleartext_traffic: (Optional) Whether to allow cleartext network + # requests during the test. # use_native_activity: Test implements ANativeActivity_onCreate(). # # Example @@ -3896,6 +4152,8 @@ if (enable_java_templates) { assert(_use_native_activity != "" && _android_manifest != "") if (!defined(invoker.android_manifest)) { + _allow_cleartext_traffic = defined(invoker.allow_cleartext_traffic) && + invoker.allow_cleartext_traffic jinja_template("${target_name}_manifest") { _native_library_name = get_label_info(invoker.shared_library, "name") if (defined(invoker.android_manifest_template)) { @@ -3909,6 +4167,7 @@ if (enable_java_templates) { "is_component_build=${is_component_build}", "native_library_name=${_native_library_name}", "use_native_activity=${_use_native_activity}", + "allow_cleartext_traffic=${_allow_cleartext_traffic}", ] } } @@ -3942,7 +4201,6 @@ if (enable_java_templates) { if (!defined(use_default_launcher) || use_default_launcher) { deps += [ - "//base:base_java", "//build/android/gtest_apk:native_test_instrumentation_test_runner_java", "//testing/android/native_test:native_test_java", ] @@ -4040,6 +4298,10 @@ if (enable_java_templates) { # absolute paths, such as for third_party or generated .proto files. # http://crbug.com/691451 tracks fixing this. # + # generator_plugin_label (optional) + # GN label for plugin executable which generates custom cc stubs. + # Don't specify a toolchain, host toolchain is assumed. + # # Example: # proto_java_library("foo_proto_java") { # proto_path = "src/foo" @@ -4056,24 +4318,47 @@ if (enable_java_templates) { _srcjar_path = "$target_gen_dir/$target_name.srcjar" script = "//build/protoc_java.py" - deps = [] if (defined(invoker.deps)) { - deps += invoker.deps + # Need to care only about targets that might generate .proto files. + # No need to depend on java_library or android_resource targets. 
+ deps = filter_exclude(invoker.deps, java_target_patterns) } sources = invoker.sources depfile = "$target_gen_dir/$target_name.d" outputs = [ _srcjar_path ] args = [ - "--depfile", - rebase_path(depfile, root_build_dir), - "--protoc", - rebase_path(android_protoc_bin, root_build_dir), - "--proto-path", - rebase_path(invoker.proto_path, root_build_dir), - "--srcjar", - rebase_path(_srcjar_path, root_build_dir), - ] + rebase_path(sources, root_build_dir) + "--depfile", + rebase_path(depfile, root_build_dir), + "--protoc", + rebase_path(android_protoc_bin, root_build_dir), + "--proto-path", + rebase_path(invoker.proto_path, root_build_dir), + "--srcjar", + rebase_path(_srcjar_path, root_build_dir), + ] + + if (defined(invoker.generator_plugin_label)) { + if (host_os == "win") { + _host_executable_suffix = ".exe" + } else { + _host_executable_suffix = "" + } + + _plugin_host_label = + invoker.generator_plugin_label + "($host_toolchain)" + _plugin_path = + get_label_info(_plugin_host_label, "root_out_dir") + "/" + + get_label_info(_plugin_host_label, "name") + _host_executable_suffix + args += [ + "--plugin", + rebase_path(_plugin_path, root_build_dir), + ] + deps += [ _plugin_host_label ] + inputs = [ _plugin_path ] + } + + args += rebase_path(sources, root_build_dir) if (defined(invoker.import_dirs)) { foreach(_import_dir, invoker.import_dirs) { @@ -4096,6 +4381,81 @@ if (enable_java_templates) { } } + # Compile a flatbuffer to java. + # + # This generates java files from flat buffers and creates an Android library + # containing the classes. + # + # Variables + # sources (required) + # Paths to .fbs files to compile. + # + # root_dir (required) + # Root directory of .fbs files. + # + # deps (optional) + # Additional dependencies. Passed through to both the action and the + # android_library targets. + # + # flatc_include_dirs (optional) + # A list of extra import directories to be passed to flatc compiler. + # + # + # Example: + # flatbuffer_java_library("foo_flatbuffer_java") { + # root_dir = "src/foo" + # sources = [ "$proto_path/foo.fbs" ] + # } + template("flatbuffer_java_library") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + _template_name = target_name + _flatc_dep = "//third_party/flatbuffers:flatc($host_toolchain)" + _flatc_out_dir = get_label_info(_flatc_dep, "root_out_dir") + _flatc_bin = "$_flatc_out_dir/flatc" + + action_with_pydeps("${_template_name}__flatc_java") { + _srcjar_path = "$target_gen_dir/$target_name.srcjar" + script = "//build/android/gyp/flatc_java.py" + + deps = [ _flatc_dep ] + if (defined(invoker.deps)) { + deps += invoker.deps + } + inputs = [ _flatc_bin ] + + sources = invoker.sources + outputs = [ _srcjar_path ] + args = [ + "--flatc", + rebase_path(_flatc_bin, root_build_dir), + "--import-dir", + rebase_path(invoker.root_dir, root_build_dir), + "--srcjar", + rebase_path(_srcjar_path, root_build_dir), + ] + rebase_path(sources, root_build_dir) + + if (defined(invoker.flatc_include_dirs)) { + foreach(_include_dir, invoker.flatc_include_dirs) { + args += [ + "--import-dir", + rebase_path(_include_dir, root_build_dir), + ] + } + } + } + + android_library(target_name) { + chromium_code = false + sources = [] + srcjar_deps = [ ":${_template_name}__flatc_java" ] + deps = [ "//third_party/flatbuffers:flatbuffers_java" ] + if (defined(invoker.deps)) { + deps += invoker.deps + } + } + } + # Declare an Android library target for a prebuilt AAR. 
# # This target creates an Android library containing java code and Android @@ -4118,9 +4478,6 @@ if (enable_java_templates) { # ignore_native_libraries: Whether to ignore .so files found in the .aar. # See also extract_native_libraries. # ignore_proguard_configs: Whether to ignore proguard configs. - # ignore_info_updates: Whether to ignore the info file when - # update_android_aar_prebuilts gn arg is true. However, the info file - # will still be verified regardless of the value of this flag. # strip_resources: Whether to ignore android resources found in the .aar. # custom_package: Java package for generated R.java files. # extract_native_libraries: Whether to extract .so files found in the .aar. @@ -4140,8 +4497,14 @@ if (enable_java_templates) { _info_path = invoker.info_path } _output_path = "${target_out_dir}/${target_name}" + + # Some targets only differ by _java with other targets so _java and _junit + # need to be replaced by non-empty strings to avoid duplicate targets. (e.g. + # androidx_window_window_java vs androidx_window_window_java_java). _target_name_without_java_or_junit = - string_replace(string_replace(target_name, "_java", ""), "_junit", "") + string_replace(string_replace(target_name, "_java", "_J"), + "_junit", + "_U") # This unpack target is a python action, not a valid java target. Since the # java targets below depend on it, its name must not match the java patterns @@ -4164,20 +4527,25 @@ if (enable_java_templates) { # to keep the logic for generated 'android_aar_prebuilt' rules simple. not_needed(invoker, [ "resource_overlay" ]) - _ignore_info_updates = - defined(invoker.ignore_info_updates) && invoker.ignore_info_updates + _aar_common_args = [ rebase_path(invoker.aar_path, root_build_dir) ] + if (_strip_resources) { + _aar_common_args += [ "--ignore-resources" ] + } + if (defined(invoker.resource_exclusion_globs)) { + _aar_common_args += + [ "--resource-exclusion-globs=${invoker.resource_exclusion_globs}" ] + } # Scan the AAR file and determine the resources and jar files. # Some libraries might not have resources; others might have two jars. - if (!_ignore_info_updates && update_android_aar_prebuilts) { + if (update_android_aar_prebuilts) { print("Writing " + rebase_path(_info_path, "//")) exec_script("//build/android/gyp/aar.py", [ - "list", - rebase_path(invoker.aar_path, root_build_dir), - "--output", - rebase_path(_info_path, root_build_dir), - ]) + "list", + "--output", + rebase_path(_info_path, root_build_dir), + ] + _aar_common_args) } # If "gn gen" is failing on the following line, you need to generate an @@ -4187,6 +4555,7 @@ if (enable_java_templates) { _scanned_files = read_file(_info_path, "scope") _use_scanned_assets = !_ignore_assets && _scanned_files.assets != [] + _has_resources = _scanned_files.resources != [] assert(_ignore_aidl || _scanned_files.aidl == [], "android_aar_prebuilt() aidl not yet supported." 
+ @@ -4207,30 +4576,24 @@ if (enable_java_templates) { action_with_pydeps(_unpack_target_name) { script = "//build/android/gyp/aar.py" # Unzips the AAR args = [ - "extract", - rebase_path(invoker.aar_path, root_build_dir), - "--output-dir", - rebase_path(_output_path, root_build_dir), - "--assert-info-file", - rebase_path(_info_path, root_build_dir), - ] - if (_strip_resources) { - args += [ "--ignore-resources" ] - } + "extract", + "--output-dir", + rebase_path(_output_path, root_build_dir), + "--assert-info-file", + rebase_path(_info_path, root_build_dir), + ] + _aar_common_args inputs = [ invoker.aar_path ] outputs = [ "${_output_path}/AndroidManifest.xml" ] - if (!_strip_resources && _scanned_files.has_r_text_file) { + outputs += + get_path_info(rebase_path(_scanned_files.resources, "", _output_path), + "abspath") + if (_scanned_files.has_r_text_file) { # Certain packages, in particular Play Services have no R.txt even # though its presence is mandated by AAR spec. Such packages cause # spurious rebuilds if this output is specified unconditionally. outputs += [ "${_output_path}/R.txt" ] } - if (!_strip_resources && _scanned_files.resources != []) { - outputs += get_path_info( - rebase_path(_scanned_files.resources, "", _output_path), - "abspath") - } if (_scanned_files.has_classes_jar) { outputs += [ "${_output_path}/classes.jar" ] } @@ -4255,15 +4618,11 @@ if (enable_java_templates) { } } - _has_unignored_resources = - !_strip_resources && - (_scanned_files.resources != [] || _scanned_files.has_r_text_file) - _should_process_manifest = !_ignore_manifest && !_scanned_files.is_manifest_empty # Create the android_resources target for resources. - if (_has_unignored_resources || _should_process_manifest) { + if (_has_resources || _should_process_manifest) { _res_target_name = "${target_name}__resources" android_resources(_res_target_name) { forward_variables_from(invoker, @@ -4273,7 +4632,7 @@ if (enable_java_templates) { "testonly", "strip_drawables", ]) - deps = [ ":$_unpack_target_name" ] + public_deps = [ ":$_unpack_target_name" ] if (_should_process_manifest) { android_manifest_dep = ":$_unpack_target_name" android_manifest = "${_output_path}/AndroidManifest.xml" @@ -4282,11 +4641,8 @@ if (enable_java_templates) { custom_package = _scanned_files.manifest_package } - sources = [] - if (!_strip_resources) { - sources = rebase_path(_scanned_files.resources, "", _output_path) - } - if (!_strip_resources && _scanned_files.has_r_text_file) { + sources = rebase_path(_scanned_files.resources, "", _output_path) + if (_scanned_files.has_r_text_file) { r_text_file = "${_output_path}/R.txt" } } @@ -4310,6 +4666,7 @@ if (enable_java_templates) { _assets_target_name = "${target_name}__assets" android_assets(_assets_target_name) { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + deps = [ ":$_unpack_target_name" ] renaming_sources = [] renaming_destinations = [] foreach(_asset_file, _scanned_files.assets) { @@ -4323,9 +4680,12 @@ if (enable_java_templates) { } } + _target_label = get_label_info(":$target_name", "label_no_toolchain") + # Create android_java_prebuilt target for classes.jar. 
if (_scanned_files.has_classes_jar) { _java_library_vars = [ + "alternative_android_sdk_dep", "bytecode_rewriter_target", "enable_bytecode_checks", "jar_excluded_patterns", @@ -4350,7 +4710,7 @@ if (enable_java_templates) { jar_path = "$_output_path/${_tuple[1]}" _base_output_name = get_path_info(jar_path, "name") output_name = "${invoker.target_name}-$_base_output_name" - public_target_label = invoker.target_name + public_target_label = _target_label } } @@ -4361,6 +4721,7 @@ if (enable_java_templates) { [ "deps", "input_jars_paths", + "mergeable_android_manifests", "proguard_configs", ]) if (!defined(deps)) { @@ -4386,13 +4747,16 @@ if (enable_java_templates) { proguard_configs += [ "$_output_path/proguard.txt" ] } } - public_target_label = invoker.target_name + public_target_label = _target_label } } java_group(target_name) { forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) public_deps = [ ":$_unpack_target_name" ] + if (defined(invoker.public_deps)) { + public_deps += invoker.public_deps + } deps = [] if (defined(_jar_target_name)) { deps += [ ":$_jar_target_name" ] @@ -4439,6 +4803,7 @@ if (enable_java_templates) { # keystore_name: optional keystore name, used only when generating APKs. # keystore_password: optional keystore password, used only when # generating APKs. + # rotation_config: optional .textproto to enable key rotation. # # command_line_flags_file: Optional. If provided, named of the on-device # file that will be used to store command-line arguments. The default @@ -4460,8 +4825,7 @@ if (enable_java_templates) { # used as a library jar for synchronized proguarding. # # compress_shared_libraries: Optional. Whether to compress shared libraries - # such that they are extracted upon install. Libraries prefixed with - # "crazy." are never compressed. + # such that they are extracted upon install. # # system_image_locale_allowlist: List of locales that should be included # on system APKs generated from this bundle. @@ -4490,6 +4854,11 @@ if (enable_java_templates) { # default_modules_for_testing: (optional): A list of DFM that the wrapper # script should install. This is for local testing only, and does not # affect the actual DFM in production. + # + # add_view_trace_events: (optional): If true will add an additional step to + # add trace events to all Android views contained in the bundle. It also + # requires build argument enable_trace_event_bytecode_rewriting = true. + # # Example: # android_app_bundle("chrome_public_bundle") { # base_module_target = "//chrome/android:chrome_public_apk" @@ -4507,19 +4876,11 @@ if (enable_java_templates) { _proguard_enabled = defined(invoker.proguard_enabled) && invoker.proguard_enabled - if (defined(invoker.version_code)) { - _version_code = invoker.version_code - } else { - _version_code = android_default_version_code - } - - if (android_override_version_code != "") { - _version_code = android_override_version_code + _min_sdk_version = default_min_sdk_version + if (defined(invoker.min_sdk_version)) { + _min_sdk_version = invoker.min_sdk_version } - # Prevent "unused variable". 
- not_needed([ "_version_code" ]) - _bundle_base_path = "$root_build_dir/apks" if (defined(invoker.bundle_base_path)) { _bundle_base_path = invoker.bundle_base_path @@ -4536,7 +4897,7 @@ if (enable_java_templates) { _base_target_gen_dir = get_label_info(invoker.base_module_target, "target_gen_dir") _base_module_build_config = - "$_base_target_gen_dir/${_base_target_name}.build_config" + "$_base_target_gen_dir/${_base_target_name}.build_config.json" _base_module_build_config_target = "${invoker.base_module_target}$build_config_target_suffix" _rebased_base_module_build_config = @@ -4554,22 +4915,7 @@ if (enable_java_templates) { }, ] - _enable_multidex = - !defined(invoker.enable_multidex) || invoker.enable_multidex - - if (!_proguard_enabled && defined(invoker.min_sdk_version)) { - not_needed(invoker, [ "min_sdk_version" ]) - } - - # Prevent "unused variable". - not_needed([ "_enable_multidex" ]) - if (_proguard_enabled) { - _uses_static_library_synchronized_proguard = - defined(invoker.static_library_synchronized_proguard) && - invoker.static_library_synchronized_proguard - - # TODO(crbug.com/1032609): Remove dexsplitter from Trichrome Proguard. _dex_target = "${_target_name}__dex" _proguard_mapping_path = "${_bundle_path}.mapping" } @@ -4595,7 +4941,7 @@ if (enable_java_templates) { _module_target_gen_dir = get_label_info(_module_target, "target_gen_dir") _module.build_config = - "$_module_target_gen_dir/${_module_target_name}.build_config" + "$_module_target_gen_dir/${_module_target_name}.build_config.json" _module.build_config_target = "$_module_target$build_config_target_suffix" _module.parent = "base" @@ -4621,8 +4967,7 @@ if (enable_java_templates) { get_label_info(invoker.static_library_provider, "name") _static_library_gen_dir = get_label_info(invoker.static_library_provider, "target_gen_dir") - _lib_proxy_module.build_config = - "$_static_library_gen_dir/$_static_library_target_name.build_config" + _lib_proxy_module.build_config = "$_static_library_gen_dir/$_static_library_target_name.build_config.json" _lib_proxy_module.build_config_target = "${invoker.static_library_provider}$build_config_target_suffix" } @@ -4673,74 +5018,156 @@ if (enable_java_templates) { deps = [ "${invoker.base_module_target}__compile_resources" ] } - _build_config = "$target_gen_dir/${_target_name}.build_config" + _build_config = "$target_gen_dir/${_target_name}.build_config.json" _rebased_build_config = rebase_path(_build_config, root_build_dir) _build_config_target = "$_target_name$build_config_target_suffix" if (defined(invoker.proguard_android_sdk_dep)) { - proguard_android_sdk_dep_ = invoker.proguard_android_sdk_dep + _android_sdk_dep = invoker.proguard_android_sdk_dep } else { - proguard_android_sdk_dep_ = "//third_party/android_sdk:android_sdk_java" + _android_sdk_dep = default_android_sdk_dep } if (_proguard_enabled) { _proguard_mapping_path = "${_bundle_path}.mapping" + _add_view_trace_events = + defined(invoker.add_view_trace_events) && + invoker.add_view_trace_events && enable_trace_event_bytecode_rewriting + } else { + not_needed(invoker, [ "add_view_trace_events" ]) } write_build_config(_build_config_target) { type = "android_app_bundle" - possible_config_deps = _module_targets + [ proguard_android_sdk_dep_ ] + possible_config_deps = _module_targets + [ _android_sdk_dep ] build_config = _build_config proguard_enabled = _proguard_enabled module_build_configs = _module_build_configs + modules = _modules if (_proguard_enabled) { + add_view_trace_events = _add_view_trace_events 
proguard_mapping_path = _proguard_mapping_path } } if (_proguard_enabled) { - # If this Bundle uses a static library, the static library APK will - # create the synchronized dex file path. - if (_uses_static_library_synchronized_proguard) { - if (defined(invoker.min_sdk_version)) { - not_needed(invoker, [ "min_sdk_version" ]) + if (_add_view_trace_events) { + _trace_event_rewriter_target = + "//build/android/bytecode:trace_event_adder" + _rewritten_jar_target_name = "${target_name}__trace_event_rewritten" + _rewriter_path = root_build_dir + "/bin/helper/trace_event_adder" + _stamp = "${target_out_dir}/${target_name}.trace_event_rewrite.stamp" + action_with_pydeps(_rewritten_jar_target_name) { + script = "//build/android/gyp/trace_event_bytecode_rewriter.py" + inputs = [ + _rewriter_path, + _build_config, + ] + outputs = [ _stamp ] + depfile = "$target_gen_dir/$_rewritten_jar_target_name.d" + args = [ + "--stamp", + rebase_path(_stamp, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + "--script", + rebase_path(_rewriter_path, root_build_dir), + "--classpath", + "@FileArg($_rebased_build_config:deps_info:javac_full_classpath)", + "--classpath", + "@FileArg($_rebased_build_config:android:sdk_jars)", + "--input-jars", + "@FileArg($_rebased_build_config:deps_info:device_classpath)", + "--output-jars", + "@FileArg($_rebased_build_config:deps_info:trace_event_rewritten_device_classpath)", + ] + deps = [ + ":$_build_config_target", + _trace_event_rewriter_target, + ] + _module_java_targets } - } else { - dex(_dex_target) { - forward_variables_from(invoker, - [ - "expected_proguard_config", - "expected_proguard_config_base", - "min_sdk_version", - "proguard_enable_obfuscation", - ]) - if (defined(expected_proguard_config)) { - top_target_name = _target_name - } - enable_multidex = _enable_multidex - proguard_enabled = true - proguard_mapping_path = _proguard_mapping_path - proguard_sourcefile_suffix = "$android_channel-$_version_code" - build_config = _build_config + } - deps = _module_java_targets + [ ":$_build_config_target" ] - modules = _modules + dex(_dex_target) { + forward_variables_from(invoker, + [ + "custom_assertion_handler", + "expected_proguard_config", + "expected_proguard_config_base", + "proguard_enable_obfuscation", + ]) + if (defined(expected_proguard_config)) { + top_target_name = _target_name } + min_sdk_version = _min_sdk_version + add_view_trace_events = _add_view_trace_events + proguard_enabled = true + proguard_mapping_path = _proguard_mapping_path + build_config = _build_config + + deps = _module_java_targets + [ ":$_build_config_target" ] + if (_add_view_trace_events) { + deps += [ ":${_rewritten_jar_target_name}" ] + } + modules = _modules } } _all_create_module_targets = [] _all_module_zip_paths = [] _all_module_build_configs = [] + _all_module_unused_resources_deps = [] foreach(_module, _modules) { _module_target = _module.module_target _module_build_config = _module.build_config _module_build_config_target = _module.build_config_target + _module_target_name = get_label_info(_module_target, "name") if (!_proguard_enabled) { - _dex_target_for_module = "${_module_target}__final_dex" + _dex_target = "${_module_target_name}__final_dex" + _dex_path = "$target_out_dir/$_module_target_name/$_module_target_name.mergeddex.jar" + dex(_dex_target) { + forward_variables_from(invoker, [ "custom_assertion_handler" ]) + min_sdk_version = _min_sdk_version + output = _dex_path + build_config = _build_config + + # This will be a pure dex-merge. 
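+          # (Illustrative: desugaring is disabled because every input is already compiled dex; this step only merges the per-module .dex files into the .mergeddex.jar output.)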
+ input_dex_filearg = "@FileArg($_rebased_build_config:modules:${_module.name}:all_dex_files)" + enable_desugar = false + + deps = [ + ":$_build_config_target", + ":${_module_target_name}__java", + ] + } + } + _dex_target_for_module = ":$_dex_target" + + _use_baseline_profile = + _proguard_enabled && defined(invoker.baseline_profile_path) && + enable_baseline_profiles + if (_use_baseline_profile) { + _binary_profile_target = + "${_module_target_name}__binary_baseline_profile" + _binary_baseline_profile_path = "$target_out_dir/$_module_target_name/$_module_target_name.baseline.prof" + _binary_baseline_profile_metadata_path = + _binary_baseline_profile_path + "m" + create_binary_profile(_binary_profile_target) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + binary_baseline_profile_path = _binary_baseline_profile_path + binary_baseline_profile_metadata_path = + _binary_baseline_profile_metadata_path + proguard_mapping_path = _proguard_mapping_path + build_config = _module_build_config + input_profile_path = invoker.baseline_profile_path + deps = [ + _dex_target_for_module, + _module_build_config_target, + ] + } } else { - _dex_target_for_module = ":$_dex_target" + not_needed(invoker, [ "baseline_profile_path" ]) } # Generate one module .zip file per bundle module. @@ -4749,20 +5176,22 @@ if (enable_java_templates) { # the internal module name inside the final bundle, in other words, # this file *must* be named ${_module.name}.zip _create_module_target = "${_target_name}__${_module.name}__create" - _module_zip_path = "$target_gen_dir/$target_name/${_module.name}.zip" - + _module_zip_path = "$target_out_dir/$target_name/${_module.name}.zip" create_android_app_bundle_module(_create_module_target) { forward_variables_from(invoker, [ "is_multi_abi", - "min_sdk_version", "uncompress_dex", - "proguard_enabled", ]) module_name = _module.name + min_sdk_version = _min_sdk_version build_config = _module_build_config module_zip_path = _module_zip_path native_libraries_config = _native_libraries_config + if (!_proguard_enabled) { + dex_path = _dex_path + # dex_path is read from the build_config in the proguard case. + } if (module_name == "base" && defined(invoker.expected_libs_and_assets)) { @@ -4792,6 +5221,17 @@ if (enable_java_templates) { _secondary_abi_native_libraries_config deps += [ ":$_secondary_abi_native_libraries_config_target" ] } + + if (_use_baseline_profile) { + # extra_assets is a list of ["{src_path}:{dst_path}"] + extra_assets = [ + rebase_path(_binary_baseline_profile_path, root_build_dir) + + ":dexopt/baseline.prof", + rebase_path(_binary_baseline_profile_metadata_path, + root_build_dir) + ":dexopt/baseline.profm", + ] + deps += [ ":$_binary_profile_target" ] + } } _all_create_module_targets += [ @@ -4801,6 +5241,40 @@ if (enable_java_templates) { ] _all_module_zip_paths += [ _module_zip_path ] _all_module_build_configs += [ _module_build_config ] + _all_module_unused_resources_deps += [ + "${_module_target}__compile_resources", + _dex_target_for_module, + _module_build_config_target, + ] + } + _strip_unused_resources = defined(invoker.strip_unused_resources) && + invoker.strip_unused_resources + if (_strip_unused_resources) { + # Resources only live in the base module so we define the unused resources + # target only on the base module target. 
+ _unused_resources_target = "${_base_target_name}__unused_resources" + _unused_resources_config = + "${_base_target_gen_dir}/${_base_target_name}_unused_resources.config" + _unused_resources_r_txt_out = + "${_base_target_gen_dir}/${_base_target_name}_unused_resources.R.txt" + unused_resources(_unused_resources_target) { + deps = _all_module_unused_resources_deps + all_module_build_configs = _all_module_build_configs + build_config = _base_module_build_config + if (_proguard_enabled) { + proguard_mapping_path = _proguard_mapping_path + } + output_config = _unused_resources_config + output_r_txt = _unused_resources_r_txt_out + } + _unused_resources_final_path = "${_bundle_path}.unused_resources" + _copy_unused_resources_target = + "${_base_target_name}__copy_unused_resources" + copy(_copy_unused_resources_target) { + deps = [ ":$_unused_resources_target" ] + sources = [ _unused_resources_config ] + outputs = [ _unused_resources_final_path ] + } } _all_rebased_module_zip_paths = @@ -4829,7 +5303,8 @@ if (enable_java_templates) { _bundle_target_name = "${_target_name}__bundle" action_with_pydeps(_bundle_target_name) { script = "//build/android/gyp/create_app_bundle.py" - inputs = _all_module_zip_paths + _all_module_build_configs + inputs = _all_module_zip_paths + _all_module_build_configs + + [ _BUNDLETOOL_JAR_PATH ] outputs = [ _bundle_path ] deps = _all_create_module_targets + [ ":$_build_config_target" ] args = [ @@ -4845,16 +5320,37 @@ if (enable_java_templates) { invoker.compress_shared_libraries) { args += [ "--compress-shared-libraries" ] } + + # Android P+ supports loading from stored dex. + if (_min_sdk_version < 27) { + args += [ "--compress-dex" ] + } + + if (defined(invoker.rotation_config)) { + args += [ + "--rotation-config", + rebase_path(invoker.rotation_config, root_build_dir), + ] + } + if (treat_warnings_as_errors) { args += [ "--warnings-as-errors" ] } if (_enable_language_splits) { - args += [ - "--base-allowlist-rtxt-path=@FileArg(" + "${_rebased_base_module_build_config}:deps_info:base_allowlist_rtxt_path)", - "--base-module-rtxt-path=@FileArg(" + - "${_rebased_base_module_build_config}:deps_info:r_text_path)", - ] + args += [ "--base-allowlist-rtxt-path=@FileArg($_rebased_base_module_build_config:deps_info:base_allowlist_rtxt_path)" ] + if (_strip_unused_resources) { + # Use the stripped out rtxt file to set resources that are pinned to + # the default language split.
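+        # (Assumed behavior, for example: a resource still listed in the stripped R.txt stays pinned to the base/default split rather than moving into a per-language split.)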
+ _rebased_unused_resources_r_txt_out = + rebase_path(_unused_resources_r_txt_out, root_build_dir) + inputs += [ _unused_resources_r_txt_out ] + deps += [ ":$_unused_resources_target" ] + args += + [ "--base-module-rtxt-path=$_rebased_unused_resources_r_txt_out" ] + } else { + args += [ "--base-module-rtxt-path=@FileArg($_rebased_base_module_build_config:deps_info:r_text_path)" ] + } } if (defined(invoker.validate_services) && invoker.validate_services) { args += [ "--validate-services" ] @@ -4937,8 +5433,7 @@ if (enable_java_templates) { args = [ "--script-output-path", rebase_path(_bundle_wrapper_script_path, root_build_dir), - "--package-name=@FileArg(" + - "$_rebased_base_module_build_config:deps_info:package_name)", + "--package-name=@FileArg($_rebased_base_module_build_config:deps_info:package_name)", "--aapt2", rebase_path(_android_aapt2_path, root_build_dir), "--bundle-path", @@ -4998,8 +5493,8 @@ if (enable_java_templates) { forward_variables_from(invoker, [ "lint_baseline_file", + "lint_jar_path", "lint_suppressions_file", - "min_sdk_version", ]) build_config = _build_config build_config_dep = ":$_build_config_target" @@ -5009,12 +5504,15 @@ if (enable_java_templates) { } if (defined(invoker.lint_min_sdk_version)) { min_sdk_version = invoker.lint_min_sdk_version + } else { + min_sdk_version = _min_sdk_version } } } else { not_needed(invoker, [ "lint_baseline_file", + "lint_jar_path", "lint_min_sdk_version", "lint_suppressions_dep", "lint_suppressions_file", @@ -5040,7 +5538,10 @@ if (enable_java_templates) { _apks_path = "$root_build_dir/apks/$_bundle_name.apks" action_with_pydeps("${_target_name}_apks") { script = "//build/android/gyp/create_app_bundle_apks.py" - inputs = [ _bundle_path ] + inputs = [ + _bundle_path, + _BUNDLETOOL_JAR_PATH, + ] outputs = [ _apks_path ] data = [ _apks_path ] args = [ @@ -5057,6 +5558,9 @@ if (enable_java_templates) { "--keystore-password", android_keystore_password, ] + if (debuggable_apks) { + args += [ "--local-testing" ] + } deps = [ ":$_bundle_target_name" ] metadata = { install_artifacts = [ _apks_path ] diff --git a/build/config/android/sdk.gni b/build/config/android/sdk.gni index d2e67a7d4be2..fb39315c4640 100644 --- a/build/config/android/sdk.gni +++ b/build/config/android/sdk.gni @@ -1,10 +1,13 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # The default SDK release used by public builds. Value may differ in # internal builds. -default_android_sdk_release = "r" +default_android_sdk_release = "t" # SDK releases against which public builds are supported. -public_sdk_releases = [ "r" ] +public_sdk_releases = [ + "t", + "tprivacysandbox", +] diff --git a/build/config/android/system_image.gni b/build/config/android/system_image.gni new file mode 100644 index 000000000000..79f856099023 --- /dev/null +++ b/build/config/android/system_image.gni @@ -0,0 +1,174 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +# Creates a stub .apk suitable for use with compressed system APKs. +# +# Variables: +# package_name: Package name to use for the stub. +# version_code: Version code for the stub. +# version_name: Version name for the stub. +# package_info_from_target: Use the package name and version_code from this +# apk/bundle target. 
+# static_library_name: For static library apks, name for the <static-library>. +# static_library_version: For static library apks, version for the +# <static-library> tag (for TrichromeLibrary, we set this to be the same +# as the package's version_code) +# stub_output: Path to output stub apk (default: do not create a stub). +# +# package_name and package_info_from_target are mutually exclusive. +template("system_image_stub_apk") { + # Android requires stubs end with -Stub.apk. + assert(filter_exclude([ invoker.stub_output ], [ "*-Stub.apk" ]) == [], + "stub_output \"${invoker.stub_output}\" must end with \"-Stub.apk\"") + + _resource_apk_path = "${target_out_dir}/$target_name.ap_" + _resource_apk_target_name = "${target_name}__compile_resources" + + _manifest_target_name = "${target_name}__manifest" + _manifest_path = "$target_gen_dir/$_manifest_target_name.xml" + action("$_manifest_target_name") { + outputs = [ _manifest_path ] + script = "//build/android/gyp/create_stub_manifest.py" + args = [ + "--output", + rebase_path(_manifest_path, root_build_dir), + ] + if (defined(invoker.static_library_name)) { + args += [ + "--static-library-name", + invoker.static_library_name, + ] + + # TODO(crbug.com/1408164): Make static_library_version mandatory. + if (defined(invoker.static_library_version)) { + args += [ + "--static-library-version", + invoker.static_library_version, + ] + } else { + args += [ "--static-library-version=1" ] + } + } + } + + action_with_pydeps(_resource_apk_target_name) { + script = "//build/android/gyp/compile_resources.py" + inputs = [ + _manifest_path, + android_sdk_jar, + ] + outputs = [ _resource_apk_path ] + args = [ + "--aapt2-path", + rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir), + "--min-sdk-version=$default_min_sdk_version", + "--target-sdk-version=$default_android_sdk_version", + "--android-manifest", + rebase_path(_manifest_path, root_build_dir), + "--arsc-path", + rebase_path(_resource_apk_path, root_build_dir), + ] + deps = [ ":$_manifest_target_name" ] + if (defined(invoker.package_name)) { + _package_name = invoker.package_name + _version_code = invoker.version_code + _version_name = invoker.version_name + } else { + _target = invoker.package_info_from_target + deps += [ "${_target}$build_config_target_suffix" ] + _build_config = get_label_info(_target, "target_gen_dir") + "/" + + get_label_info(_target, "name") + ".build_config.json" + inputs += [ _build_config ] + _rebased_build_config = rebase_path(_build_config, root_build_dir) + _package_name = "@FileArg($_rebased_build_config:deps_info:package_name)" + _version_code = "@FileArg($_rebased_build_config:deps_info:version_code)" + _version_name = "@FileArg($_rebased_build_config:deps_info:version_name)" + } + args += [ + "--rename-manifest-package=$_package_name", + "--arsc-package-name=$_package_name", + "--version-code=$_version_code", + "--version-name=$_version_name", + "--include-resources", + rebase_path(android_sdk_jar, root_build_dir), + ] + } + + package_apk(target_name) { + forward_variables_from(invoker, + [ + "keystore_name", + "keystore_path", + "keystore_password", + ]) + min_sdk_version = default_min_sdk_version + deps = [ ":$_resource_apk_target_name" ] + + packaged_resources_path = _resource_apk_path + output_apk_path = invoker.stub_output + } +} + +# Generates artifacts for system APKs. +# +# Variables: +# apk_or_bundle_target: Target that creates input bundle or apk. +# input_apk_or_bundle: Path to input .apk or .aab. +# static_library_name: For static library apks, name for the <static-library>.
+# static_library_version: For static library apks, version for the +# <static-library> tag (for TrichromeLibrary, we set this to be the same +# as the package's version_code) +# output: Path to the output system .apk or .zip. +# fuse_apk: Fuse all apk splits into a single .apk (default: false). +# stub_output: Path to output stub apk (default: do not create a stub). +# +template("system_image_apks") { + if (defined(invoker.stub_output)) { + _stub_apk_target_name = "${target_name}__stub" + system_image_stub_apk(_stub_apk_target_name) { + forward_variables_from(invoker, + [ + "static_library_name", + "static_library_version", + ]) + package_info_from_target = invoker.apk_or_bundle_target + stub_output = invoker.stub_output + } + } + + action_with_pydeps(target_name) { + script = "//build/android/gyp/system_image_apks.py" + deps = [ invoker.apk_or_bundle_target ] + inputs = [ invoker.input_apk_or_bundle ] + if (defined(invoker.stub_output)) { + public_deps = [ ":$_stub_apk_target_name" ] + } + outputs = [ invoker.output ] + args = [ + "--input", + rebase_path(invoker.input_apk_or_bundle, root_out_dir), + "--output", + rebase_path(invoker.output, root_out_dir), + ] + + _is_bundle = + filter_exclude([ invoker.input_apk_or_bundle ], [ "*.aab" ]) == [] + + if (_is_bundle) { + _wrapper_path = "$root_out_dir/bin/" + + get_label_info(invoker.apk_or_bundle_target, "name") + args += [ + "--bundle-wrapper", + rebase_path(_wrapper_path, root_out_dir), + ] + inputs += [ _wrapper_path ] + deps += [ "//build/android:apk_operations_py" ] + if (defined(invoker.fuse_apk) && invoker.fuse_apk) { + args += [ "--fuse-apk" ] + } + } + } +} diff --git a/build/config/android/test/classpath_order/BUILD.gn b/build/config/android/test/classpath_order/BUILD.gn deleted file mode 100644 index decd1a84d243..000000000000 --- a/build/config/android/test/classpath_order/BUILD.gn +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright 2021 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import("//build/config/android/rules.gni") - -template("test_resources") { - jinja_template_resources(target_name) { - forward_variables_from(invoker, "*") - testonly = true - variables = [ "resource_name=$resource_name" ] - res_dir = "java/res_template" - resources = [ "java/res_template/values/values.xml" ] - } -} - -template("generate_dummy_android_library") { - # No underscores to avoid crbug.com/908819. - _generate_java_source_target_name = "${target_name}generatejavasource" - jinja_template(_generate_java_source_target_name) { - testonly = true - input = "java/src/org/chromium/build/classpath_order/Dummy.java.jinja2" - output = "$target_gen_dir/java/src/org/chromium/build/classpath_order/${invoker.class_name}.java" - variables = [ "class_name=${invoker.class_name}" ] - } - - android_library(target_name) { - forward_variables_from(invoker, "*") - - if (!defined(invoker.deps)) { - deps = [] - } - - sources = get_target_outputs(":${_generate_java_source_target_name}") - deps += [ ":${_generate_java_source_target_name}" ] - } -} - -# Test that classpath order keeps resources accessible when multiple targets generate -# resources for the same package. Specifically, test that an android_library precedes -# its dependencies regardless of the relative lexographic order.
- -test_resources("a1_dependency_resources") { - resource_name = "a1_dependency_resource" -} - -generate_dummy_android_library("a1_dependency_java") { - testonly = true - class_name = "A1Dependency" - resources_package = "org.chromium.build.classpath_order.test1" - deps = [ ":a1_dependency_resources" ] -} - -test_resources("z1_master_resources") { - resource_name = "z1_master_resource" - deps = [ ":a1_dependency_resources" ] -} - -generate_dummy_android_library("z1_master_java") { - testonly = true - class_name = "Z1Master" - resources_package = "org.chromium.build.classpath_order.test1" - deps = [ - ":a1_dependency_java", - ":z1_master_resources", - ] -} - -test_resources("z2_dependency_resources") { - resource_name = "z2_dependency_resource" -} - -generate_dummy_android_library("z2_dependency_java") { - testonly = true - class_name = "Z2Dependency" - resources_package = "org.chromium.build.classpath_order.test2" - deps = [ ":z2_dependency_resources" ] -} - -test_resources("a2_master_resources") { - resource_name = "a2_master_resource" - deps = [ ":z2_dependency_resources" ] -} - -generate_dummy_android_library("a2_master_java") { - testonly = true - class_name = "A2Master" - resources_package = "org.chromium.build.classpath_order.test2" - deps = [ - ":a2_master_resources", - ":z2_dependency_java", - ] -} - -java_library("junit_tests") { - bypass_platform_checks = true - testonly = true - sources = - [ "java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java" ] - deps = [ - ":a1_dependency_java", - ":a2_master_java", - ":z1_master_java", - ":z2_dependency_java", - "//testing/android/junit:junit_test_support", - "//third_party/android_deps:robolectric_all_java", - "//third_party/android_support_test_runner:runner_java", - "//third_party/androidx:androidx_test_runner_java", - "//third_party/junit", - ] -} diff --git a/build/config/android/test/classpath_order/java/res_template/values/values.xml b/build/config/android/test/classpath_order/java/res_template/values/values.xml deleted file mode 100644 index ee706b289b5b..000000000000 --- a/build/config/android/test/classpath_order/java/res_template/values/values.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - 42 - diff --git a/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java b/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java deleted file mode 100644 index c5a920260589..000000000000 --- a/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2021 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -package org.chromium.build.classpath_order; - -import static org.junit.Assert.assertTrue; - -import androidx.test.filters.SmallTest; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.robolectric.annotation.Config; - -import org.chromium.testing.local.LocalRobolectricTestRunner; - -/** - * Test that resources defined in different android_resources() targets but with the same - * package are accessible. 
- */ -@RunWith(LocalRobolectricTestRunner.class) -@Config(manifest = Config.NONE) -public final class ClassPathOrderTest { - @Test - @SmallTest - public void testAll() { - assertTrue(org.chromium.build.classpath_order.test1.R.integer.a1_dependency_resource >= 0); - assertTrue(org.chromium.build.classpath_order.test1.R.integer.z1_master_resource >= 0); - assertTrue(org.chromium.build.classpath_order.test2.R.integer.z2_dependency_resource >= 0); - assertTrue(org.chromium.build.classpath_order.test2.R.integer.a2_master_resource >= 0); - } -} diff --git a/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja2 b/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja2 deleted file mode 100644 index 0ccf28b28490..000000000000 --- a/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja2 +++ /dev/null @@ -1,8 +0,0 @@ -// Copyright 2021 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -package org.chromium.build.classpath_order; - -public class {{class_name}} { -} diff --git a/build/config/android/test/proto/BUILD.gn b/build/config/android/test/proto/BUILD.gn index a28111a66a86..1d0f37a33951 100644 --- a/build/config/android/test/proto/BUILD.gn +++ b/build/config/android/test/proto/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/android/test/proto/absolute_dep/absolute_dep.proto b/build/config/android/test/proto/absolute_dep/absolute_dep.proto index 46dcce7679be..f4aa92a9c59b 100644 --- a/build/config/android/test/proto/absolute_dep/absolute_dep.proto +++ b/build/config/android/test/proto/absolute_dep/absolute_dep.proto @@ -1,4 +1,4 @@ -// Copyright 2020 The Chromium Authors. All rights reserved. +// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/config/android/test/proto/relative_dep/relative_dep.proto b/build/config/android/test/proto/relative_dep/relative_dep.proto index 600b6ca7fed4..917d2c3e5445 100644 --- a/build/config/android/test/proto/relative_dep/relative_dep.proto +++ b/build/config/android/test/proto/relative_dep/relative_dep.proto @@ -1,4 +1,4 @@ -// Copyright 2020 The Chromium Authors. All rights reserved. +// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/config/android/test/proto/root/absolute_child.proto b/build/config/android/test/proto/root/absolute_child.proto index d6a6a13f368e..389538c9b04e 100644 --- a/build/config/android/test/proto/root/absolute_child.proto +++ b/build/config/android/test/proto/root/absolute_child.proto @@ -1,4 +1,4 @@ -// Copyright 2020 The Chromium Authors. All rights reserved. +// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
diff --git a/build/config/android/test/proto/root/absolute_root.proto b/build/config/android/test/proto/root/absolute_root.proto index 3e200978d951..ad138abe06c9 100644 --- a/build/config/android/test/proto/root/absolute_root.proto +++ b/build/config/android/test/proto/root/absolute_root.proto @@ -1,4 +1,4 @@ -// Copyright 2020 The Chromium Authors. All rights reserved. +// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/config/android/test/proto/root/relative_child.proto b/build/config/android/test/proto/root/relative_child.proto index 10f7ed42773f..225758cbee49 100644 --- a/build/config/android/test/proto/root/relative_child.proto +++ b/build/config/android/test/proto/root/relative_child.proto @@ -1,4 +1,4 @@ -// Copyright 2020 The Chromium Authors. All rights reserved. +// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/config/android/test/proto/root/relative_root.proto b/build/config/android/test/proto/root/relative_root.proto index a37a268b4f26..9644fa113412 100644 --- a/build/config/android/test/proto/root/relative_root.proto +++ b/build/config/android/test/proto/root/relative_root.proto @@ -1,4 +1,4 @@ -// Copyright 2020 The Chromium Authors. All rights reserved. +// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/config/android/test/resource_overlay/BUILD.gn b/build/config/android/test/resource_overlay/BUILD.gn index 4a063d221517..3b7936384c61 100644 --- a/build/config/android/test/resource_overlay/BUILD.gn +++ b/build/config/android/test/resource_overlay/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -43,7 +43,7 @@ test_resources("root_tagged_root_resources") { deps = [ ":root_tagged_dependency_resources" ] } -android_library("javatests") { +android_library("unit_device_javatests") { testonly = true sources = [ "java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java", @@ -53,7 +53,7 @@ android_library("javatests") { ":dependency_tagged_root_resources", ":root_tagged_root_resources", "//base:base_java_test_support", - "//third_party/android_support_test_runner:runner_java", + "//third_party/androidx:androidx_test_monitor_java", "//third_party/androidx:androidx_test_runner_java", "//third_party/junit", ] diff --git a/build/config/android/test/resource_overlay/java/res_template/values/values.xml b/build/config/android/test/resource_overlay/java/res_template/values/values.xml index 973f855206df..13ff516535a6 100644 --- a/build/config/android/test/resource_overlay/java/res_template/values/values.xml +++ b/build/config/android/test/resource_overlay/java/res_template/values/values.xml @@ -1,5 +1,5 @@ [hunk omitted: the XML markup was lost in extraction; consistent with the rest of this patch, the hunk appears to update the file's copyright header to "Copyright 2020 The Chromium Authors"]

[A diff for a debugger-visualizer file (by its content, build/config/c++/libc++.natvis) followed here, but its XML markup was lost in extraction. From the surviving fragments: the libc++ visualizers for string, vector, list, map, multimap, set, multiset, the unordered containers, tuple, optional, and pair were updated for Chromium's renamed libc++ ABI namespace, replacing every std::__1:: qualifier with std::Cr::, and the map/multimap value accessor changed from __value_.__cc to __value_.__cc_.]
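(A hedged aside on the rename above: std::Cr is not upstream libc++'s default inline namespace, std::__1; it comes from Chromium building its bundled libc++ with a custom ABI namespace, which renames every libc++ symbol, so debugger visualizers have to match the new spelling. A minimal sketch of the mechanism, assuming the conventions of //build/config/c++ — the exact target and variable names may differ:

  # Give libc++ a Chromium-specific inline ABI namespace, turning
  # std::__1::vector<T> into std::Cr::vector<T> in symbols and debug info.
  defines = [ "_LIBCPP_ABI_NAMESPACE=Cr" ]

Any tooling that matches libc++ type names, like these natvis entries, must be updated in lockstep.)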
diff --git a/build/config/chrome_build.gni b/build/config/chrome_build.gni index 5c51d7f2cc3d..b5156d5c677a 100644 --- a/build/config/chrome_build.gni +++ b/build/config/chrome_build.gni @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,15 +7,58 @@ declare_args() { # true means official Google Chrome branding (requires extra Google-internal # resources). is_chrome_branded = false + + # Whether to enable the Chrome for Testing (CfT) flavor. This arg is not + # compatible with `is_chrome_branded`. + # + # Design document: https://goo.gle/chrome-for-testing + is_chrome_for_testing = false + + # Whether to use internal Chrome for Testing (CfT). + # Requires `src-internal/` and `is_chrome_for_testing = true`. + # + # When true, use Google-internal icons, otherwise fall back to Chromium icons. + is_chrome_for_testing_branded = false + + # Set to true to enable settings for high end Android devices, typically + # enhancing speed at the expense of resources such as binary sizes and memory. + is_high_end_android = false + + if (is_android) { + # By default, Trichrome channels are compiled using separate package names. + # Set this to 'true' to compile Trichrome channels using the Stable channel's + # package name. This currently only affects builds with `android_channel = + # "beta"`. + use_stable_package_name_for_trichrome = false + } } +assert( + !is_chrome_for_testing || !is_chrome_branded, + "`is_chrome_for_testing = true` is incompatible with `is_chrome_branded = true`") + +assert( + is_chrome_for_testing || !is_chrome_for_testing_branded, + "`is_chrome_for_testing_branded = true` requires `is_chrome_for_testing = true`") + declare_args() { # Refers to the subdirectory for branding in various places including # chrome/app/theme. - if (is_chrome_branded) { + # + # `branding_path_product` must not contain slashes. + if (is_chrome_for_testing) { + if (is_chrome_for_testing_branded) { + branding_path_component = "google_chrome/google_chrome_for_testing" + } else { + branding_path_component = "chromium" + } + branding_path_product = "chromium" + } else if (is_chrome_branded) { branding_path_component = "google_chrome" + branding_path_product = "google_chrome" } else { branding_path_component = "chromium" + branding_path_product = "chromium" } } diff --git a/build/config/chromebox_for_meetings/BUILD.gn b/build/config/chromebox_for_meetings/BUILD.gn new file mode 100644 index 000000000000..08d74f97d263 --- /dev/null +++ b/build/config/chromebox_for_meetings/BUILD.gn @@ -0,0 +1,11 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file.
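+# Illustrative usage of the ":buildflags" target defined below (hypothetical
+# consumer target and file names, not part of this change):
+#
+#   source_set("cfm_support") {
+#     sources = [ "cfm_support.cc" ]
+#     deps = [ "//build/config/chromebox_for_meetings:buildflags" ]
+#   }
+#
+# cfm_support.cc would then include the generated header and branch on it:
+#   #include "build/config/chromebox_for_meetings/buildflags.h"
+#   #if BUILDFLAG(PLATFORM_CFM) ... #endif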
+ +import("//build/buildflag_header.gni") +import("//build/config/chromebox_for_meetings/buildflags.gni") + +buildflag_header("buildflags") { + header = "buildflags.h" + flags = [ "PLATFORM_CFM=$is_cfm" ] +} diff --git a/build/config/chromebox_for_meetings/OWNERS b/build/config/chromebox_for_meetings/OWNERS new file mode 100644 index 000000000000..985da0c1b6b4 --- /dev/null +++ b/build/config/chromebox_for_meetings/OWNERS @@ -0,0 +1 @@ +file://chromeos/ash/components/chromebox_for_meetings/OWNERS diff --git a/build/config/chromebox_for_meetings/README.md b/build/config/chromebox_for_meetings/README.md new file mode 100644 index 000000000000..ddbe3c9c254e --- /dev/null +++ b/build/config/chromebox_for_meetings/README.md @@ -0,0 +1,31 @@ +# CfM GN Build Flags + +Note: GN Flags are Build time flags + +You can get a comprehensive list of all arguments supported by gn by running the +command gn args --list out/some-directory (the directory passed to gn args is +required as gn args will invokes gn gen to generate the build.ninja files). + +## is_cfm (BUILDFLAG(PLATFORM_CFM)) + +Flag for building chromium for CfM devices. + +### Query Flag +```bash +$ gn args out_/{Release||Debug} --list=is_cfm +``` + +### Enable Flag +```bash +$ gn args out_/{Release||Debug} +$ Editor will open add is_cfm=true save and exit +``` + +### Alt: EnrollmentRequisitionManager + +We can alternatively use the EnrollmentRequisitionManager to determine if +chromium is running a CfM enabled Platform in source code + +```cpp +policy::EnrollmentRequisitionManager::IsRemoraRequisition(); +``` diff --git a/build/config/chromebox_for_meetings/buildflags.gni b/build/config/chromebox_for_meetings/buildflags.gni new file mode 100644 index 000000000000..22ad88a32b34 --- /dev/null +++ b/build/config/chromebox_for_meetings/buildflags.gni @@ -0,0 +1,8 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # True if compiling for Chromebox for Meeting devices. + is_cfm = false +} diff --git a/build/config/chromecast/BUILD.gn b/build/config/chromecast/BUILD.gn index 0c3b2cbeb208..acaf990a21ca 100644 --- a/build/config/chromecast/BUILD.gn +++ b/build/config/chromecast/BUILD.gn @@ -1,10 +1,10 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/chromecast_build.gni") -assert(is_chromecast) +assert(is_castos || is_cast_android) config("static_config") { if (!is_clang) { @@ -28,6 +28,7 @@ config("static_config") { config("ldconfig") { visibility = [ ":*" ] + configs = [] # Chromecast executables depend on several shared libraries in # /oem_cast_shlib, $ORIGIN, and $ORIGIN/lib. Add these rpaths to each binary. diff --git a/build/config/chromecast/OWNERS b/build/config/chromecast/OWNERS new file mode 100644 index 000000000000..253037d736b5 --- /dev/null +++ b/build/config/chromecast/OWNERS @@ -0,0 +1,3 @@ +mfoltz@chromium.org +rwkeane@google.com +seantopping@chromium.org diff --git a/build/config/chromecast_build.gni b/build/config/chromecast_build.gni index deecdb53b390..e8294ce7b021 100644 --- a/build/config/chromecast_build.gni +++ b/build/config/chromecast_build.gni @@ -1,25 +1,16 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. 
+# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # The args declared in this file should be referenced by components outside of # //chromecast. Args needed only in //chromecast should be declared in # //chromecast/chromecast.gni. +# +# TODO(crbug.com/1294964): Rename this file after is_chromecast is removed. declare_args() { - # Set this true for a Chromecast build. Chromecast builds are supported on - # Linux and Android. - is_chromecast = false - - # If true, IS_CAST_DEBUG_BUILD() will evaluate to 1 in version.h. Otherwise, - # it will evaluate to 0. Overriding this when is_debug=false is useful for - # doing engineering builds. - cast_is_debug = is_debug - - # chromecast_branding is used to include or exclude Google-branded components. - # Set it to "public" for a Chromium build. - chromecast_branding = "public" - # Set this true for an audio-only Chromecast build. + # TODO(crbug.com/1293538): Replace with a buildflag for speaker-only builds not + # specific to Cast. is_cast_audio_only = false # If non empty, rpath of executables is set to this. @@ -28,33 +19,63 @@ declare_args() { # Set true to enable modular_updater. enable_modular_updater = false + + # True to enable the cast audio renderer. + # + # TODO(crbug.com/1293520): Remove this buildflag. + enable_cast_audio_renderer = false + + # Set this to true to build for Nest hardware running Linux (aka "CastOS"). + # Set this to false to use the defaults for Linux. + is_castos = false + + # Set this to true to build for Android-based Cast devices. + # Set this to false to use the defaults for Android. + is_cast_android = false } -# Note(slan): This arg depends on the value of is_chromecast, and thus must be -# declared in a separate block. These blocks can be combined when/if -# crbug.com/542846 is resolved. +# Restrict is_castos and is_cast_android to only be set on the target toolchain. +is_castos = is_castos && current_toolchain == default_toolchain +is_cast_android = is_cast_android && current_toolchain == default_toolchain + declare_args() { - # True if Chromecast build is targeted for linux desktop. This type of build - # is useful for testing and development, but currently supports only a subset - # of Cast functionality. Though this defaults to true for x86 Linux devices, - # this should be overriden manually for an embedded x86 build. - # TODO(slan): Remove instances of this when x86 is a fully supported platform. - is_cast_desktop_build = is_chromecast && target_os == "linux" && - (target_cpu == "x86" || target_cpu == "x64") + # Set this true for a Chromecast build. Chromecast builds are supported on + # Linux, Android, ChromeOS, and Fuchsia. + enable_cast_receiver = false } declare_args() { - # True to enable the cast renderer. It is enabled by default for linux and - # android audio only builds. - enable_cast_renderer = is_chromecast && (is_linux || is_chromeos || - (is_cast_audio_only && is_android)) + # True to enable the cast renderer. It is enabled by default for Linux and + # Android audio only builds. + # + # TODO(crbug.com/1293520): Remove this buildflag. + enable_cast_renderer = + enable_cast_receiver && + (is_linux || is_chromeos || (is_cast_audio_only && is_android)) } # Configures media options for cast. 
See media/media_options.gni cast_mojo_media_services = [] cast_mojo_media_host = "" -if (enable_cast_renderer) { +if (enable_cast_audio_renderer) { + if (is_android) { + cast_mojo_media_services = [ + "cdm", + "audio_decoder", + ] + } + + if (!is_cast_audio_only) { + cast_mojo_media_services += [ "video_decoder" ] + } + + if (is_android && is_cast_audio_only) { + cast_mojo_media_host = "browser" + } else { + cast_mojo_media_host = "gpu" + } +} else if (enable_cast_renderer) { # In this path, mojo media services are hosted in two processes: # 1. "renderer" and "cdm" run in browser process. This is hard coded in the # code. @@ -63,7 +84,6 @@ if (enable_cast_renderer) { "cdm", "renderer", ] - if (!is_cast_audio_only) { cast_mojo_media_services += [ "video_decoder" ] } @@ -87,9 +107,18 @@ if (enable_cast_renderer) { } # Assert that Chromecast is being built for a supported platform. -assert(is_linux || is_chromeos || is_android || is_fuchsia || !is_chromecast, - "Chromecast builds are not supported on $target_os") +assert(is_linux || is_chromeos || is_android || is_fuchsia || + !enable_cast_receiver, + "Cast receiver builds are not supported on $current_os") + +assert(enable_cast_receiver || !is_cast_audio_only, + "is_cast_audio_only = true requires enable_cast_receiver = true.") + +assert(enable_cast_receiver || !is_castos, + "is_castos = true requires enable_cast_receiver = true.") +assert(is_linux || !is_castos, "is_castos = true requires is_linux = true.") -# Assert that is_cast_audio_only and is_cast_desktop_build are both false on a -# non-Chromecast build. -assert(is_chromecast || (!is_cast_audio_only && !is_cast_desktop_build)) +assert(enable_cast_receiver || !is_cast_android, + "is_cast_android = true requires enable_cast_receiver = true.") +assert(is_android || !is_cast_android, + "is_cast_android = true requires is_android = true.") diff --git a/build/config/chromeos/BUILD.gn b/build/config/chromeos/BUILD.gn index f3dfe70d424e..0606072656c8 100644 --- a/build/config/chromeos/BUILD.gn +++ b/build/config/chromeos/BUILD.gn @@ -1,10 +1,10 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/chromeos/ui_mode.gni") -assert(is_chromeos_ash) +assert(is_chromeos) declare_args() { # The location to a file used to dump symbols ordered by Call-Chain Clustering (C3) @@ -13,11 +13,6 @@ declare_args() { dump_call_chain_clustering_order = "" } -declare_args() { - # Whether or not we're using new pass manager to build and link Chrome - use_new_pass_manager = dump_call_chain_clustering_order != "" -} - config("print_orderfile") { if (dump_call_chain_clustering_order != "") { _output_orderfile = @@ -26,9 +21,42 @@ config("print_orderfile") { } } -config("compiler") { - if (use_new_pass_manager) { - cflags = [ "-fexperimental-new-pass-manager" ] - ldflags = [ "-fexperimental-new-pass-manager" ] +config("compiler_cpu_abi") { + # Lacros currently uses the *-generic-crosstoolchain.gni files generated + # by the simplechrome sdk in build/args/chromeos. These target triples + # match the target toolchain defaults in these directories. Passing them + # redundantly is harmless and prepares for using Chromium's toolchain. + # Non-Lacros Chrome OS builds use per-board toolchains, which might use + # different triples. So don't do this there. 
+ if (is_chromeos_device && is_chromeos_lacros) { + if (current_cpu == "x64") { + asmflags = [ "--target=x86_64-cros-linux-gnu" ] + cflags = [ "--target=x86_64-cros-linux-gnu" ] + ldflags = [ "--target=x86_64-cros-linux-gnu" ] + } else if (current_cpu == "arm") { + asmflags = [ "--target=armv7a-cros-linux-gnueabihf" ] + cflags = [ "--target=armv7a-cros-linux-gnueabihf" ] + ldflags = [ "--target=armv7a-cros-linux-gnueabihf" ] + } else if (current_cpu == "arm64") { + asmflags = [ "--target=aarch64-cros-linux-gnu" ] + cflags = [ "--target=aarch64-cros-linux-gnu" ] + ldflags = [ "--target=aarch64-cros-linux-gnu" ] + } else { + assert(false, "add support for $current_cpu here") + } + } +} + +config("runtime_library") { + # These flags are added by the Chrome OS toolchain compiler wrapper, + # or are implicitly passed by the Chrome OS toolchain's clang due to the cmake + # flags that clang was built with. + # Passing them redundantly is harmless and prepares for using Chromium's + # toolchain for Lacros. + if (is_chromeos_device) { + ldflags = [ + "--rtlib=compiler-rt", + "--unwindlib=libunwind", + ] + } } diff --git a/build/config/chromeos/args.gni b/build/config/chromeos/args.gni index 5ae6277454aa..8fb50537461d 100644 --- a/build/config/chromeos/args.gni +++ b/build/config/chromeos/args.gni @@ -1,4 +1,4 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -24,4 +24,18 @@ declare_args() { # https://chromium.googlesource.com/chromium/src/+/main/docs/chromeos_build_instructions.md # https://chromium.googlesource.com/chromiumos/docs/+/main/simple_chrome_workflow.md is_chromeos_device = false + + # Determines if we run the test in skylab, aka the CrOS labs. + is_skylab = false + + # Determines if we collect hardware information in chrome://system and + # feedback logs. A similar build flag "hw_details" is defined in Chrome OS + # (see https://crrev.com/c/3123455). + is_chromeos_with_hw_details = false + + # Determines if we're willing to link against libinput + use_libinput = false + + # Refers to the separate branding required for the reven build. + is_reven = false } diff --git a/build/config/chromeos/rules.gni b/build/config/chromeos/rules.gni index 0ccbcb22a184..10af886af5ef 100644 --- a/build/config/chromeos/rules.gni +++ b/build/config/chromeos/rules.gni @@ -1,15 +1,16 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/chrome_build.gni") import("//build/config/chromeos/args.gni") import("//build/config/chromeos/ui_mode.gni") +import("//build/config/dcheck_always_on.gni") import("//build/config/gclient_args.gni") import("//build/config/python.gni") import("//build/util/generate_wrapper.gni") -assert((is_chromeos_ash || is_chromeos_lacros) && is_chromeos_device) +assert(is_chromeos && is_chromeos_device) # Determine the real paths for various items in the SDK, which may be used # in the 'generate_runner_script' template below. We do so outside the template # to avoid loading such path too many times. _symlinks = [] _symlinks = [ # Tast harness & test data.
- rebase_path("${_cache_path_prefix}+chromeos-base/tast-cmd"), - rebase_path("${_cache_path_prefix}+chromeos-base/tast-remote-tests-cros"), + rebase_path("${_cache_path_prefix}+autotest_server_package.tar.bz2"), # Binutils (and other toolchain tools) used to deploy Chrome to the device. rebase_path( @@ -69,7 +69,7 @@ if (cros_sdk_version != "") { # VM-related tools. _symlinks += [ rebase_path("${_cache_path_prefix}+sys-firmware/seabios"), - rebase_path("${_cache_path_prefix}+chromiumos_qemu_image.tar.xz"), + rebase_path("${_cache_path_prefix}+chromiumos_test_image.tar.xz"), rebase_path("${_cache_path_prefix}+app-emulation/qemu"), ] } @@ -100,6 +100,133 @@ if (cros_sdk_version != "") { } } +# Creates dependencies required by skylab testing. If passed the +# generated_script and test_exe this will generate the skylab runner script. +# If passed tast_attr_expr, tast_tests or tast_disabled_tests this will +# generate a filter file containing the expression for running tests in skylab. +# Args: +# generated_script: Name of the generated runner script created for test_exe +# test_exe: Name of the executable to run with the generated script. +# tast_attr_expr: Tast expression to determine tests to run. This creates the +# initial set of tests that can be further filtered.. +# tast_tests: Names of tests to enable in tast. All other tests will be +# disabled that are not listed. +# tast_disabled_tests: Names of tests to disable in tast. All other tests that +# match the tast expression will still run. +# tast_control: gni file with collections of tests to be used for specific +# filters (e.g. "//chromeos/tast_control.gni"). Any lists of strings in +# this file will be used to generate additional tast expressions with +# those strings expanded into tests to disable (i.e. as && !"name:test"). +# The name of those lists are then intended to be used to specify in +# test_suites.pyl which collection to be used on specific test suites. 
+template("generate_skylab_deps") { + forward_variables_from(invoker, + [ + "generated_script", + "test_exe", + "tast_attr_expr", + "tast_tests", + "tast_disabled_tests", + "tast_control", + ]) + if (defined(test_exe) || defined(generated_script)) { + assert(defined(test_exe) && defined(generated_script), + "The test_exe and generated_script must both be defined when " + + "generating the skylab runner script") + action(target_name) { + script = "//build/chromeos/generate_skylab_deps.py" + outputs = [ generated_script ] + args = [ + "generate-runner", + "--test-exe", + test_exe, + "--output", + rebase_path(generated_script, root_build_dir), + ] + + deps = [ "//testing/buildbot/filters:chromeos_filters" ] + if (defined(invoker.deps)) { + deps += invoker.deps + } + + data = [ generated_script ] + if (defined(invoker.data)) { + data += invoker.data + } + + data_deps = [ "//testing:test_scripts_shared" ] + if (defined(invoker.data_deps)) { + data_deps += invoker.data_deps + } + } + } + if (defined(tast_attr_expr) || defined(tast_tests) || + defined(tast_disabled_tests)) { + if (defined(tast_disabled_tests)) { + assert(defined(tast_attr_expr), + "tast_attr_expr must be used when specifying tast_disabled_tests.") + } + _generated_filter = "$root_build_dir/bin/${target_name}.filter" + _skylab_args = [ + "generate-filter", + "--output", + rebase_path(_generated_filter), + ] + if (defined(tast_control)) { + _skylab_args += [ + "--tast-control", + rebase_path(tast_control), + ] + } + if (defined(tast_attr_expr)) { + _skylab_args += [ + "--tast-expr", + tast_attr_expr, + ] + } + if (defined(tast_tests)) { + foreach(_test, tast_tests) { + _skylab_args += [ + "--enabled-tests", + _test, + ] + } + } + if (defined(tast_disabled_tests)) { + foreach(_test, tast_disabled_tests) { + _excluded_test_name_and_board = [] + _excluded_test_name_and_board = string_split(_test, "@") + [ "" ] + _excluded_test_name = _excluded_test_name_and_board[0] + _excluded_board = _excluded_test_name_and_board[1] + if (_excluded_board == "" || _excluded_board == cros_board) { + _skylab_args += [ + "--disabled-tests", + _excluded_test_name, + ] + } + } + } + action(target_name) { + script = "//build/chromeos/generate_skylab_deps.py" + if (defined(tast_control)) { + sources = [ tast_control ] + } + outputs = [ _generated_filter ] + args = _skylab_args + if (defined(invoker.data_deps)) { + data_deps = invoker.data_deps + } + data = [ _generated_filter ] + if (defined(invoker.data)) { + data += invoker.data + } + if (defined(invoker.deps)) { + deps = invoker.deps + } + } + } +} + # Creates a script at $generated_script that can be used to launch a cros VM # and optionally run a test within it. # Args: @@ -123,6 +250,7 @@ if (cros_sdk_version != "") { # them, and it's designed for use cases where one builds for one board # (e.g. amd64-generic), but tests on a different board (e.g. eve). # tast_vars: A list of "key=value" runtime variable pairs to pass to invoke +# strip_chrome: If true, strips Chrome before deploying it for non-Tast tests. # the Tast tests. 
For more details, please see: # https://chromium.googlesource.com/chromiumos/platform/tast/+/HEAD/docs/writing_tests.md#Runtime-variables template("generate_runner_script") { @@ -133,6 +261,7 @@ template("generate_runner_script") { "generated_script", "runtime_deps_file", "skip_generating_board_args", + "strip_chrome", "tast_attr_expr", "tast_tests", "tast_vars", @@ -157,9 +286,9 @@ template("generate_runner_script") { if (!defined(deploy_lacros)) { deploy_lacros = false } - assert(!(deploy_chrome && deploy_lacros), - "deploy_chrome and deploy_lacros are exclusive.") - + if (!defined(strip_chrome)) { + strip_chrome = false + } is_tast = defined(tast_attr_expr) || defined(tast_tests) assert(!(is_tast && defined(test_exe)), "Tast tests are invoked from binaries shipped with the VM image. " + @@ -167,6 +296,10 @@ template("generate_runner_script") { assert(is_tast || !defined(tast_vars), "tast_vars is only support for Tast tests") + if (is_tast) { + not_needed([ "strip_chrome" ]) + } + # If we're in the cros chrome-sdk (and not the raw ebuild), the test will # need some additional runtime data located in the SDK cache. _sdk_data = [] @@ -196,26 +329,23 @@ template("generate_runner_script") { if (is_tast) { # Add tast sdk items. - _sdk_data += [ - _symlink_targets[0], - _symlink_targets[1], - ] + _sdk_data += [ _symlink_targets[0] ] } if (deploy_chrome) { # To deploy chrome to the VM, it needs to be stripped down to fit into # the VM. This is done by using binutils in the toolchain. So add the # toolchain to the data. _sdk_data += [ + _symlink_targets[1], _symlink_targets[2], - _symlink_targets[3], ] } if (_cros_is_vm) { # Add vm sdk items. _sdk_data += [ + _symlink_targets[3], _symlink_targets[4], _symlink_targets[5], - _symlink_targets[6], ] } } @@ -224,7 +354,6 @@ template("generate_runner_script") { generate_wrapper(target_name) { executable = "//build/chromeos/test_runner.py" - use_vpython3 = true wrapper_script = generated_script executable_args = [] @@ -275,6 +404,12 @@ template("generate_runner_script") { ] } } + if (dcheck_always_on) { + executable_args += [ + "--tast-extra-use-flags", + "chrome_dcheck", + ] + } } else { executable_args += [ "host-cmd" ] } @@ -286,6 +421,10 @@ template("generate_runner_script") { "-v", ] + if (!is_tast && strip_chrome) { + executable_args += [ "--strip-chrome" ] + } + if (!skip_generating_board_args) { executable_args += [ "--board", @@ -315,7 +454,7 @@ template("generate_runner_script") { executable_args += [ "--deploy-lacros" ] } - if (deploy_chrome && !defined(test_exe) && !is_tast) { + if (deploy_chrome && !defined(test_exe)) { executable_args += [ "--deploy-chrome" ] } @@ -325,7 +464,6 @@ template("generate_runner_script") { deps += invoker.deps } data = [ - "//.vpython", "//.vpython3", # We use android test-runner's results libs to construct gtest output @@ -334,10 +472,12 @@ template("generate_runner_script") { "//build/android/pylib/base/", "//build/android/pylib/results/", "//build/chromeos/", + "//build/util/", # Needed for various SDK components used below. "//build/cros_cache/chrome-sdk/misc/", "//build/cros_cache/chrome-sdk/symlinks/", + "//chrome/VERSION", # The LKGM file controls what version of the VM image to download. Add it # as data here so that changes to it will trigger analyze. @@ -361,6 +501,10 @@ template("generate_runner_script") { template("tast_test") { forward_variables_from(invoker, "*") + if (!defined(deploy_lacros_chrome)) { + deploy_lacros_chrome = false + } + # Default the expression to match any chrome-related test. 
if (!defined(tast_attr_expr) && !defined(tast_tests)) { # The following expression filters out all non-critical tests. See the link @@ -386,8 +530,14 @@ template("tast_test") { if (defined(tast_disabled_tests)) { assert(defined(tast_attr_expr), "tast_attr_expr must be used when specifying tast_disabled_tests.") - foreach(test, tast_disabled_tests) { - tast_attr_expr += " && !\"name:${test}\"" + foreach(_test, tast_disabled_tests) { + _excluded_test_name_and_board = [] + _excluded_test_name_and_board = string_split(_test, "@") + [ "" ] + _excluded_test_name = _excluded_test_name_and_board[0] + _excluded_board = _excluded_test_name_and_board[1] + if (_excluded_board == "" || _excluded_board == cros_board) { + tast_attr_expr += " && !\"name:${_excluded_test_name}\"" + } } } if (defined(tast_attr_expr)) { @@ -398,6 +548,7 @@ template("tast_test") { generated_script = "$root_build_dir/bin/run_${target_name}" runtime_deps_file = "$root_out_dir/${target_name}.runtime_deps" deploy_chrome = true + deploy_lacros = deploy_lacros_chrome data_deps = [ "//:chromiumos_preflight", # Builds the browser. "//chromeos:cros_chrome_deploy", # Adds additional browser run-time deps. @@ -411,6 +562,12 @@ template("tast_test") { ] data = [ "//components/crash/content/tools/generate_breakpad_symbols.py" ] + if (deploy_lacros_chrome) { + data += [ + # A script needed to launch Lacros in Lacros Tast tests. + "//build/lacros/mojo_connection_lacros_launcher.py", + ] + } } } @@ -420,56 +577,77 @@ template("lacros_tast_tests") { "tast_attr_expr", "tast_disabled_tests", "tast_tests", + "tast_control", ]) assert(defined(tast_attr_expr) != defined(tast_tests), "Specify one of tast_tests or tast_attr_expr.") - # Append any disabled tests to the expression. - if (defined(tast_disabled_tests)) { - assert(defined(tast_attr_expr), - "tast_attr_expr must be used when specifying tast_disabled_tests.") - foreach(test, tast_disabled_tests) { - tast_attr_expr += " && !\"name:${test}\"" - } - } - if (defined(tast_attr_expr)) { - tast_attr_expr = "( " + tast_attr_expr + " )" - } - - generate_runner_script(target_name) { - testonly = true - deploy_lacros = true - generated_script = "$root_build_dir/bin/run_${target_name}" - runtime_deps_file = "$root_out_dir/${target_name}.runtime_deps" + _lacros_data_deps = [ + "//chrome", # Builds the browser. - # At build time, Lacros tests don't know whether they'll run on VM or HW, - # and instead, these flags are specified at runtime when invoking the - # generated runner script. - skip_generating_board_args = true + # Tools used to symbolize Chrome crash dumps. + # TODO(crbug.com/1156772): Remove these if/when all tests pick them up by + # default. + "//third_party/breakpad:dump_syms", + "//third_party/breakpad:minidump_dump", + "//third_party/breakpad:minidump_stackwalk", + ] - # By default, tast tests download a lacros-chrome from a gcs location and - # use it for testing. To support running lacros tast tests from Chromium CI, - # a Var is added to support pointing the tast tests to use a specified - # pre-deployed lacros-chrome. The location is decided by: - # https://source.chromium.org/chromium/chromium/src/+/main:third_party/chromite/scripts/deploy_chrome.py;l=80;drc=86f1234a4be8e9574442e076cdc835897f7bea61 - tast_vars = [ "lacrosDeployedBinary=/usr/local/lacros-chrome" ] + _lacros_data = [ + "//components/crash/content/tools/generate_breakpad_symbols.py", - data_deps = [ - "//chrome", # Builds the browser. + # A script needed to launch Lacros in Lacros Tast tests. 
+ "//build/lacros/mojo_connection_lacros_launcher.py", + ] - # Tools used to symbolize Chrome crash dumps. - # TODO(crbug.com/1156772): Remove these if/when all tests pick them up by - # default. - "//third_party/breakpad:dump_syms", - "//third_party/breakpad:minidump_dump", - "//third_party/breakpad:minidump_stackwalk", - ] - - data = [ - "//components/crash/content/tools/generate_breakpad_symbols.py", + if (is_skylab) { + generate_skylab_deps(target_name) { + data = _lacros_data + data_deps = _lacros_data_deps - # A script needed to launch Lacros in Lacros Tast tests. - "//build/lacros/mojo_connection_lacros_launcher.py", - ] + # To disable a test on specific milestones, add it to the appropriate + # collection in the following file + tast_control = "//chromeos/tast_control.gni" + } + } else { + # Append any disabled tests to the expression. + if (defined(tast_disabled_tests)) { + assert(defined(tast_attr_expr), + "tast_attr_expr must be used when specifying tast_disabled_tests.") + foreach(_test, tast_disabled_tests) { + _excluded_test_name_and_board = [] + _excluded_test_name_and_board = string_split(_test, "@") + [ "" ] + _excluded_test_name = _excluded_test_name_and_board[0] + _excluded_board = _excluded_test_name_and_board[1] + if (_excluded_board == "" || _excluded_board == cros_board) { + tast_attr_expr += " && !\"name:${_excluded_test_name}\"" + } + } + } + if (defined(tast_attr_expr)) { + tast_attr_expr = "( " + tast_attr_expr + " )" + } + generate_runner_script(target_name) { + testonly = true + deploy_lacros = true + generated_script = "$root_build_dir/bin/run_${target_name}" + runtime_deps_file = "$root_out_dir/${target_name}.runtime_deps" + + # At build time, Lacros tests don't know whether they'll run on VM or HW, + # and instead, these flags are specified at runtime when invoking the + # generated runner script. + skip_generating_board_args = true + + # By default, tast tests download a lacros-chrome from a gcs location and + # use it for testing. To support running lacros tast tests from Chromium CI, + # a Var is added to support pointing the tast tests to use a specified + # pre-deployed lacros-chrome. The location is decided by: + # https://source.chromium.org/chromium/chromium/src/+/main:third_party/chromite/scripts/deploy_chrome.py;l=80;drc=86f1234a4be8e9574442e076cdc835897f7bea61 + tast_vars = [ "lacros.DeployedBinary=/usr/local/lacros-chrome" ] + + data_deps = _lacros_data_deps + + data = _lacros_data + } } } diff --git a/build/config/chromeos/ui_mode.gni b/build/config/chromeos/ui_mode.gni index 208969e006f5..ce8fa8b4da8c 100644 --- a/build/config/chromeos/ui_mode.gni +++ b/build/config/chromeos/ui_mode.gni @@ -1,9 +1,11 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +import("//build/config/chromeos/args.gni") + declare_args() { - # Deprecated, use is_lacros. + # Deprecated, use is_chromeos_lacros. # # This controls UI configuration for Chrome. # If this flag is set, we assume Chrome runs on Chrome OS devices, using @@ -12,15 +14,25 @@ declare_args() { # TODO(crbug.com/1052397): # Define chromeos_product instead, which takes either "browser" or "ash". 
# Re-define the following variables as: - # is_lacros = chromeos_product == "browser" - # is_ash = chromeos_product == "ash" + # is_chromeos_lacros = chromeos_product == "browser" + # is_chromeos_ash = chromeos_product == "ash" chromeos_is_browser_only = false - # Setting this to true when building LaCrOS-chrome will cause it to - # *also* build ash-chrome in a subdirectory using an alternate toolchain. + # Setting this to true when building linux Lacros-chrome will cause it to + # *also* build linux ash-chrome in a subdirectory using an alternate + # toolchain. # Don't set this unless you're sure you want it, because it'll double # your build time. also_build_ash_chrome = false + + # Setting this to true when building linux ash-chrome will cause it to + # *also* build linux Lacros-chrome in a subdirectory using an alternate toolchain. + also_build_lacros_chrome = false + + # Setting this when building ash-chrome will cause it to + # *also* build Lacros-chrome in a subdirectory using an alternate toolchain. + # You can set this to either "amd64" or "arm". + also_build_lacros_chrome_for_architecture = "" } # is_chromeos_{ash,lacros} is used to specify that it is specific to either @@ -31,3 +43,18 @@ declare_args() { # toolchains. is_chromeos_ash = is_chromeos && !chromeos_is_browser_only is_chromeos_lacros = is_chromeos && chromeos_is_browser_only + +# also_build_ash_chrome and also_build_lacros_chrome cannot be both true. +assert(!(also_build_ash_chrome && also_build_lacros_chrome)) + +# Can't set both also_build_lacros_chrome and +# also_build_lacros_chrome_for_architecture. +assert(!(also_build_lacros_chrome == true && + also_build_lacros_chrome_for_architecture != "")) + +# also_build_lacros_chrome_for_architecture is for device only. +assert(is_chromeos_device || also_build_lacros_chrome_for_architecture == "") + +# also_build_lacros_chrome_for_architecture is for ash build only. +assert(!chromeos_is_browser_only || + also_build_lacros_chrome_for_architecture == "") diff --git a/build/config/clang/BUILD.gn b/build/config/clang/BUILD.gn index 180e2e626be9..ed39cc68cc40 100644 --- a/build/config/clang/BUILD.gn +++ b/build/config/clang/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -15,10 +15,16 @@ config("find_bad_constructs") { "-add-plugin", "-Xclang", "find-bad-constructs", + + "-Xclang", + "-plugin-arg-find-bad-constructs", + "-Xclang", + "raw-ref-template-as-trivial-member", + "-Xclang", "-plugin-arg-find-bad-constructs", "-Xclang", - "checked-ptr-as-trivial-member", + "check-stack-allocated", ] if (is_linux || is_chromeos || is_android || is_fuchsia) { @@ -29,6 +35,27 @@ config("find_bad_constructs") { "check-ipc", ] } + + if (enable_check_raw_ptr_fields) { + cflags += [ + "-Xclang", + "-plugin-arg-find-bad-constructs", + "-Xclang", + "check-raw-ptr-fields", + + # TODO(keishi): Remove this once crrev.com/c/4387753 is rolled out. + "-Xclang", + "-plugin-arg-find-bad-constructs", + "-Xclang", + "raw-ptr-exclude-path=base/no_destructor.h", + + # TODO(keishi): Remove this once crrev.com/c/4086161 lands. 
+ "-Xclang", + "-plugin-arg-find-bad-constructs", + "-Xclang", + "raw-ptr-exclude-path=base/containers/span.h", + ] + } } } @@ -50,10 +77,5 @@ group("llvm-symbolizer_data") { data = [ "$clang_base_path/bin/llvm-symbolizer.exe" ] } else { data = [ "$clang_base_path/bin/llvm-symbolizer" ] - - if (!is_apple) { - # llvm-symbolizer uses libstdc++ from the clang package. - data += [ "$clang_base_path/lib/libstdc++.so.6" ] - } } } diff --git a/build/config/clang/clang.gni b/build/config/clang/clang.gni index 977529653438..ff978bcd0dde 100644 --- a/build/config/clang/clang.gni +++ b/build/config/clang/clang.gni @@ -1,11 +1,11 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/toolchain/toolchain.gni") if (!use_cobalt_customizations) { -default_clang_base_path = "//third_party/llvm-build/Release+Asserts" + default_clang_base_path = "//third_party/llvm-build/Release+Asserts" } declare_args() { @@ -13,10 +13,14 @@ declare_args() { # coding guidelines, etc. Only used when compiling with Chrome's Clang, not # Chrome OS's. clang_use_chrome_plugins = - is_clang && !is_nacl && !use_xcode_clang && + is_clang && !is_nacl && current_os != "zos" && default_toolchain != "//build/toolchain/cros:target" -if (!use_cobalt_customizations) { - clang_base_path = default_clang_base_path -} + enable_check_raw_ptr_fields = + build_with_chromium && !is_official_build && + ((is_linux && !is_castos) || (is_android && !is_cast_android)) + + if (!use_cobalt_customizations) { + clang_base_path = default_clang_base_path + } } diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn index 29b26ff69e0b..12742fd5c37b 100644 --- a/build/config/compiler/BUILD.gn +++ b/build/config/compiler/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -6,7 +6,6 @@ import("//build/buildflag_header.gni") import("//build/config/android/config.gni") import("//build/config/c++/c++.gni") import("//build/config/chrome_build.gni") -import("//build/config/chromecast_build.gni") import("//build/config/chromeos/args.gni") import("//build/config/chromeos/ui_mode.gni") import("//build/config/clang/clang.gni") @@ -17,6 +16,7 @@ if (!use_cobalt_customizations) { import("//build/config/gclient_args.gni") } import("//build/config/host_byteorder.gni") +import("//build/config/rust.gni") import("//build/config/sanitizers/sanitizers.gni") import("//build/config/ui.gni") import("//build/toolchain/cc_wrapper.gni") @@ -32,9 +32,6 @@ if (current_cpu == "mipsel" || current_cpu == "mips64el" || current_cpu == "mips" || current_cpu == "mips64") { import("//build/config/mips.gni") } -if (current_cpu == "x64") { - import("//build/config/x64.gni") -} if (is_mac) { import("//build/config/apple/symbols.gni") } @@ -77,9 +74,7 @@ declare_args() { # Enable fatal linker warnings. Building Chromium with certain versions # of binutils can cause linker warning. - # TODO(thakis): Set this to true unconditionally once lld/MachO bring-up - # is along far enough that it no longer emits linker warnings. - fatal_linker_warnings = !(is_apple && use_lld) + fatal_linker_warnings = true # Build with C++ RTTI enabled. 
Chromium builds without RTTI by default, # but some sanitizers are known to require it, like CFI diagnostics @@ -92,9 +87,6 @@ declare_args() { # the needed gcov profiling data. auto_profile_path = "" - # Allow projects that wish to stay on C++11 to override Chromium's default. - use_cxx11 = false - # Path to an AFDO profile to use while building with clang, if any. Empty # implies none. clang_sample_profile_path = "" @@ -107,7 +99,7 @@ declare_args() { # nonsensical for said projects. clang_use_default_sample_profile = chrome_pgo_phase == 0 && build_with_chromium && is_official_build && - (is_android || chromeos_is_browser_only || is_chromecast) + (is_android || chromeos_is_browser_only) # This configuration is used to select a default profile in Chrome OS based on # the microarchitectures we are using. This is only used if @@ -116,7 +108,8 @@ declare_args() { chromeos_afdo_platform = "atom" # Emit debug information for profiling while building with clang. - clang_emit_debug_info_for_profiling = false + # Only enable this for ChromeOS official builds for AFDO. + clang_emit_debug_info_for_profiling = is_chromeos_device && is_official_build # Turn this on to have the compiler output extra timing information. compiler_timing = false @@ -133,7 +126,8 @@ declare_args() { # the space overhead is too great. We should use some mixture of profiles and # optimization settings to better tune the size increase. thin_lto_enable_optimizations = - (is_chromeos_ash || is_android || is_win || is_linux) && is_official_build + (is_chromeos || is_android || is_win || is_linux || is_mac || + (is_ios && use_lld)) && is_official_build # Initialize all local variables with a pattern. This flag will fill # uninitialized floating-point types (and 32-bit pointers) with 0xFF and the # rest with 0xAA. This makes behavior of uninitialized memory bugs consistent, # recognizable in the debugger, and crashes on memory accesses through # uninitialized pointers. # - # TODO(crbug.com/1131993): Enabling this when 'is_android' is true breaks - # content_shell_test_apk on both ARM and x86. - # - # TODO(crbug.com/977230): Enabling this when 'use_xcode_clang' is true may - # call an old clang that doesn't support auto-init. - init_stack_vars = !is_android && !use_xcode_clang + # TODO(crbug.com/1131993): This regresses binary size by ~1MB on Android and + # needs to be evaluated before enabling it there as well. + init_stack_vars = !(is_android && is_official_build) + + # Zero init has favorable performance/size tradeoffs for Chrome OS + # but was not evaluated for other platforms. + init_stack_vars_zero = is_chromeos # This argument controls whether to enable text section splitting in the # final binary. When enabled, the separated text sections with prefix @@ -157,16 +152,6 @@ declare_args() { # The gold linker by default has text section splitting enabled. use_text_section_splitting = false - # Token limits may not be accurate for build configs not covered by the CQ, - # so only enable them by default for mainstream build configs. - enable_wmax_tokens = - !is_official_build && - ((is_mac && target_cpu == "x64" && !use_system_xcode) || - (is_linux && !is_chromeos && target_cpu == "x64") || - (is_win && target_cpu == "x86") || (is_win && target_cpu == "x64") || - (is_android && target_cpu == "arm") || - (is_android && target_cpu == "arm64")) - # Turn off the --call-graph-profile-sort flag for lld by default. Enable # selectively for targets where it's beneficial.
enable_call_graph_profile_sort = chrome_pgo_phase == 2 @@ -185,11 +170,19 @@ declare_args() { # Enable -H, which prints the include tree during compilation. # For use by tools/clang/scripts/analyze_includes.py show_includes = false -} -declare_args() { - # C++11 may not be an option if Android test infrastructure is used. - use_cxx11_on_android = use_cxx11 + # Enable Profi algorithm. Profi can infer block and edge counts. + # https://clang.llvm.org/docs/UsersManual.html#using-sampling-profilers + # TODO(crbug.com/1375958): Possibly enable this for Android too. + use_profi = is_chromeos + + # If true, linker crashes will be rerun with `--reproduce` which causes + # a reproducer file to be saved. + save_reproducers_on_lld_crash = false + + # Allow projects that wish to stay on C++17 to override Chromium's default. + # TODO(crbug.com/1402249): evaluate removing this at the end of 2023 + use_cxx17 = false } declare_args() { @@ -202,11 +195,11 @@ declare_args() { # other architectures. # # lld doesn't have the bug. - use_icf = - (is_posix || is_fuchsia) && !is_debug && !using_sanitizer && - !use_clang_coverage && !(is_android && use_order_profiling) && - (use_lld || (use_gold && (is_chromeos_ash || !(current_cpu == "x86" || - current_cpu == "x64")))) + use_icf = (is_posix || is_fuchsia) && !is_debug && !using_sanitizer && + !use_clang_coverage && current_os != "zos" && + !(is_android && use_order_profiling) && + (use_lld || (use_gold && (is_chromeos || !(current_cpu == "x86" || + current_cpu == "x64")))) } if (is_android || (is_chromeos_ash && is_chromeos_device)) { @@ -228,7 +221,8 @@ if (is_android || (is_chromeos_ash && is_chromeos_device)) { assert(!(llvm_force_head_revision && use_goma), "can't use goma with trunk clang") -assert(!(llvm_force_head_revision && use_rbe), "can't use rbe with trunk clang") +assert(!(llvm_force_head_revision && use_remoteexec), + "can't use rbe with trunk clang") # default_include_dirs --------------------------------------------------------- # # This is just a helper for moving it around. @@ -242,6 +236,18 @@ config("default_include_dirs") { ] } +# Compiler instrumentation can introduce dependencies in DSOs to symbols in +# the executable they are loaded into, so they are unresolved at link-time. +config("no_unresolved_symbols") { + if (!using_sanitizer && + (is_linux || is_chromeos || is_android || is_fuchsia)) { + ldflags = [ + "-Wl,-z,defs", + "-Wl,--as-needed", + ] + } +} + # compiler --------------------------------------------------------------------- # # Base compiler configuration. @@ -269,9 +275,6 @@ config("compiler") { configs += [ "//build/config/android:compiler" ] } else if (is_linux || is_chromeos) { configs += [ "//build/config/linux:compiler" ] - if (is_chromeos_ash) { - configs += [ "//build/config/chromeos:compiler" ] - } } else if (is_nacl) { configs += [ "//build/config/nacl:compiler" ] } else if (is_mac) { @@ -282,11 +285,14 @@ config("compiler") { configs += [ "//build/config/fuchsia:compiler" ] } else if (current_os == "aix") { configs += [ "//build/config/aix:compiler" ] + } else if (current_os == "zos") { + configs += [ "//build/config/zos:compiler" ] } configs += [ # See the definitions below. ":clang_revision", + ":rustc_revision", ":compiler_cpu_abi", ":compiler_codegen", ":compiler_deterministic", ] @@ -319,7 +325,7 @@ config("compiler") { cflags += [ "-fno-strict-aliasing" ] # See http://crbug.com/32204 # Stack protection. - if (is_mac) { + if (is_apple) { # The strong variant of the stack protector significantly increases # binary size, so only enable it in debug mode.
if (is_debug) { @@ -327,10 +333,12 @@ config("compiler") { } else { cflags += [ "-fstack-protector" ] } - } else if ((is_posix && !is_chromeos_ash && !is_nacl) || is_fuchsia) { + } else if ((is_posix && !is_chromeos && !is_nacl) || is_fuchsia) { # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc supports it. # See also https://crbug.com/533294 - cflags += [ "--param=ssp-buffer-size=4" ] + if (current_os != "zos") { + cflags += [ "--param=ssp-buffer-size=4" ] + } # The x86 toolchain currently has problems with stack-protector. if (is_android && current_cpu == "x86") { @@ -349,7 +357,8 @@ config("compiler") { } # Linker warnings. - if (fatal_linker_warnings && !is_apple && current_os != "aix") { + if (fatal_linker_warnings && !is_apple && current_os != "aix" && + current_os != "zos") { ldflags += [ "-Wl,--fatal-warnings" ] } if (fatal_linker_warnings && is_apple) { @@ -368,7 +377,7 @@ config("compiler") { ] } - # Non-Mac Posix and Fuchsia compiler flags setup. + # Non-Apple Posix and Fuchsia compiler flags setup. # ----------------------------------- if ((is_posix && !is_apple) || is_fuchsia) { if (enable_profiling) { @@ -392,7 +401,7 @@ config("compiler") { # compute, so only use it in the official build to avoid slowing down # links. ldflags += [ "-Wl,--build-id=sha1" ] - } else if (current_os != "aix") { + } else if (current_os != "aix" && current_os != "zos") { ldflags += [ "-Wl,--build-id" ] } @@ -420,6 +429,23 @@ config("compiler") { } } + # Apple compiler flags setup. + # --------------------------------- + if (is_apple) { + # On Intel, clang emits both Apple's "compact unwind" information and + # DWARF eh_frame unwind information by default, for compatibility reasons. + # This flag limits emission of eh_frame information to functions + # whose unwind information can't be expressed in the compact unwind format + # (which in practice means almost everything gets only compact unwind + # entries). This reduces object file size a bit and makes linking a bit + # faster. + # On arm64, this is already the default behavior. + if (current_cpu == "x64") { + asmflags += [ "-femit-dwarf-unwind=no-compact-unwind" ] + cflags += [ "-femit-dwarf-unwind=no-compact-unwind" ] + } + } + # Linux/Android/Fuchsia common flags setup. # --------------------------------- if (is_linux || is_chromeos || is_android || is_fuchsia) { @@ -441,27 +467,11 @@ config("compiler") { if (!is_component_build) { ldflags += [ "-Wl,-z,now" ] } - - # Compiler instrumentation can introduce dependencies in DSOs to symbols in - # the executable they are loaded into, so they are unresolved at link-time. - if (!using_sanitizer) { - ldflags += [ - "-Wl,-z,defs", - "-Wl,--as-needed", - ] - } } # Linux-specific compiler flags setup. # ------------------------------------ - if ((is_posix || is_fuchsia) && !is_apple && use_lld) { - if (current_cpu == "arm64") { - # Reduce the page size from 65536 in order to reduce binary size slightly - # by shrinking the alignment gap between segments. This also causes all - # segments to be mapped adjacently, which breakpad relies on. - ldflags += [ "-Wl,-z,max-page-size=4096" ] - } - } else if (use_gold) { + if (use_gold) { ldflags += [ "-fuse-ld=gold" ] if (!is_android) { # On Android, this isn't needed. 
gcc in the NDK knows to look next to @@ -490,7 +500,7 @@ config("compiler") { #} } - if (use_icf && !is_apple) { + if (use_icf && (!is_apple || use_lld)) { ldflags += [ "-Wl,--icf=all" ] } @@ -534,8 +544,14 @@ config("compiler") { ldflags += [ "-Wl,-z,keep-text-section-prefix" ] } - if (is_clang && !is_nacl && !use_xcode_clang) { + if (is_clang && !is_nacl && current_os != "zos") { cflags += [ "-fcrash-diagnostics-dir=" + clang_diagnostic_dir ] + if (save_reproducers_on_lld_crash && use_lld) { + ldflags += [ + "-fcrash-diagnostics=all", + "-fcrash-diagnostics-dir=" + clang_diagnostic_dir, + ] + } # TODO(hans): Remove this once Clang generates better optimized debug info # by default. https://crbug.com/765793 @@ -550,18 +566,19 @@ config("compiler") { ldflags += [ "-Wl,-mllvm,-instcombine-lower-dbg-declare=0" ] } } + + # TODO(crbug.com/1235145): Investigate why/if this should be needed. + if (is_win) { + cflags += [ "/clang:-ffp-contract=off" ] + } else { + cflags += [ "-ffp-contract=off" ] + } } # C11/C++11 compiler flags setup. # --------------------------- if (is_linux || is_chromeos || is_android || (is_nacl && is_clang) || current_os == "aix") { - if (target_os == "android") { - cxx11_override = use_cxx11_on_android - } else { - cxx11_override = use_cxx11 - } - if (is_clang) { standard_prefix = "c" @@ -585,53 +602,56 @@ config("compiler") { } cflags_c += [ "-std=${standard_prefix}11" ] - if (cxx11_override) { - # Override Chromium's default for projects that wish to stay on C++11. - cflags_cc += [ "-std=${standard_prefix}++11" ] - } else { + if (is_nacl && !is_nacl_saigo) { + # This is for the pnacl_newlib toolchain. It's only used to build + # a few independent ppapi test files that don't pull in any other + # dependencies. cflags_cc += [ "-std=${standard_prefix}++14" ] - } - } else if (!is_win && !is_nacl) { - if (target_os == "android") { - cxx11_override = use_cxx11_on_android + if (is_clang) { + cflags_cc += [ "-fno-trigraphs" ] + } + } else if (is_clang) { + if (use_cxx17) { + cflags_cc += [ "-std=${standard_prefix}++17" ] + } else { + cflags_cc += [ "-std=${standard_prefix}++20" ] + } } else { - cxx11_override = use_cxx11 - } - - # TODO(mcgrathr) - the NaCl GCC toolchain doesn't support either gnu11/gnu++11 - # or c11/c++11; we technically don't need this toolchain any more, but there - # are still a few buildbots using it, so until those are turned off - # we need the !is_nacl clause and the (is_nacl && is_clang) clause, above. + # The gcc bots are currently using GCC 9, which is not new enough to + # support "c++20"/"gnu++20". + cflags_cc += [ "-std=${standard_prefix}++2a" ] + } + } else if (is_win) { + cflags_c += [ "/std:c11" ] + if (use_cxx17 || (!is_clang && defined(msvc_use_cxx17) && msvc_use_cxx17)) { + cflags_cc += [ "/std:c++17" ] + } else { + cflags_cc += [ "/std:c++20" ] + } + } else if (!is_nacl) { + # TODO(mcgrathr) - the NaCl GCC toolchain doesn't support either + # gnu11/gnu++11 or c11/c++11; we technically don't need this toolchain any + # more, but there are still a few buildbots using it, so until those are + # turned off we need the !is_nacl clause and the (is_nacl && is_clang) + # clause, above. cflags_c += [ "-std=c11" ] - if (cxx11_override) { - cflags_cc += [ "-std=c++11" ] + + if (use_cxx17) { + cflags_cc += [ "-std=c++17" ] } else { - cflags_cc += [ "-std=c++14" ] + cflags_cc += [ "-std=c++20" ] } } - # C++17 removes trigraph support, so preemptively disable trigraphs. 
This is - # especially useful given the collision with ecmascript's logical assignment - # operators: https://github.com/tc39/proposal-logical-assignment - if (is_clang) { - # clang-cl disables trigraphs by default - if (!is_win) { - # The gnu variants of C++11 and C++14 already disable trigraph support, - # but when building with clang, we use -std=c++11 / -std=c++14, which - # enables trigraph support: override that here. - cflags_cc += [ "-fno-trigraphs" ] - } - - # Don't warn that trigraphs are ignored, since trigraphs are disabled - # anyway. + if (is_clang && current_os != "zos") { + # C++17 removes trigraph support, but clang still warns that it ignores + # them when seeing them. Don't. cflags_cc += [ "-Wno-trigraphs" ] } - if (is_mac) { - # The system libc++ on Mac doesn't have aligned allocation in C++17. - defines += [ "_LIBCPP_HAS_NO_ALIGNED_ALLOCATION" ] - cflags_cc += [ "-stdlib=libc++" ] - ldflags += [ "-stdlib=libc++" ] + if (use_relative_vtables_abi) { + cflags_cc += [ "-fexperimental-relative-c++-abi-vtables" ] + ldflags += [ "-fexperimental-relative-c++-abi-vtables" ] } # Add flags for link-time optimization. These flags enable @@ -654,9 +674,11 @@ config("compiler") { # available disk space, 40GB and 100000 files. cache_policy = "cache_size=10%:cache_size_bytes=40g:cache_size_files=100000" + # An import limit of 30 has better performance (per speedometer) and lower + # binary size than the default setting of 100. # TODO(gbiv): We ideally shouldn't need to specify this; ThinLTO # should be able to better manage binary size increases on its own. - import_instr_limit = 5 + import_instr_limit = 30 if (is_win) { ldflags += [ @@ -665,6 +687,7 @@ config("compiler") { "/lldltocache:" + rebase_path("$root_out_dir/thinlto-cache", root_build_dir), "/lldltocachepolicy:$cache_policy", + "-mllvm:-disable-auto-upgrade-debug-info", ] } else { ldflags += [ "-flto=thin" ] @@ -678,29 +701,45 @@ config("compiler") { # TODO(thakis): Check if '=0' (that is, number of cores, instead # of "all" which means number of hardware threads) is faster. ldflags += [ "-Wl,--thinlto-jobs=all" ] + if (is_apple) { + ldflags += [ + "-Wl,-cache_path_lto," + + rebase_path("$root_out_dir/thinlto-cache", root_build_dir), + "-Wcrl,object_path_lto", + ] + } else { + ldflags += + [ "-Wl,--thinlto-cache-dir=" + + rebase_path("$root_out_dir/thinlto-cache", root_build_dir) ] + } - ldflags += [ - "-Wl,--thinlto-cache-dir=" + - rebase_path("$root_out_dir/thinlto-cache", root_build_dir), - "-Wl,--thinlto-cache-policy,$cache_policy", - ] + ldflags += [ "-Wl,--thinlto-cache-policy=$cache_policy" ] - if (is_chromeos_ash) { - # Not much performance difference was noted between the default (100) - # and these. ARM was originally set lower than x86 to keep the size + if (is_chromeos) { + # ARM was originally set lower than x86 to keep the size # bloat of ThinLTO to <10%, but that's potentially no longer true. # FIXME(inglorion): maybe tune these? - if (target_cpu == "arm" || target_cpu == "arm64") { - import_instr_limit = 20 - } else { - import_instr_limit = 30 - } + # TODO(b/271459198): Revert limit on amd64 to 30 when fixed. + import_instr_limit = 20 + } else if (is_android) { + # TODO(crbug.com/1308318): Investigate if we can get the > 6% perf win + # of import_instr_limit 30 with a binary size hit smaller than ~2 MiB. 
+ import_instr_limit = 5 } ldflags += [ "-Wl,-mllvm,-import-instr-limit=$import_instr_limit" ] + + if (!is_chromeos) { + # TODO(https://crbug.com/972449): turn on for ChromeOS when that + # toolchain has this flag. + # We only use one version of LLVM within a build so there's no need to + # upgrade debug info, which can be expensive since it runs the verifier. + ldflags += [ "-Wl,-mllvm,-disable-auto-upgrade-debug-info" ] + } } - # TODO(https://crbug.com/1211155): investigate why this isn't effective on arm32. + # TODO(https://crbug.com/1211155): investigate why this isn't effective on + # arm32. if (!is_android || current_cpu == "arm64") { cflags += [ "-fwhole-program-vtables" ] if (!is_win) { @@ -736,6 +775,16 @@ config("compiler") { ldflags += [ "-Wl,--no-rosegment" ] } + # TODO(crbug.com/1374347): Cleanup undefined symbol errors caught by + # --no-undefined-version. + if (use_lld && !is_win && !is_mac && !is_ios) { + ldflags += [ "-Wl,--undefined-version" ] + } + + if (use_lld && is_apple) { + ldflags += [ "-Wl,--strict-auto-link" ] + } + # LLD does call-graph-sorted binary layout by default when profile data is # present. On Android this increases binary size due to more thunks for long # jumps. Turn it off by default and enable selectively for targets where it's @@ -743,31 +792,63 @@ config("compiler") { if (use_lld && !enable_call_graph_profile_sort) { if (is_win) { ldflags += [ "/call-graph-profile-sort:no" ] - } else if (!is_apple) { - # TODO(thakis): Once LLD's Mach-O port basically works, implement call - # graph profile sorting for it, add an opt-out flag, and pass it here. + } else { ldflags += [ "-Wl,--no-call-graph-profile-sort" ] } } if (is_clang && !is_nacl && show_includes) { - assert(!is_win, "show_includes is not supported on Windows") - cflags += [ - "-H", - "-fshow-skipped-includes", - ] + if (is_win) { + # TODO(crbug.com/1223741): Goma mixes the -H and /showIncludes output. + assert(!use_goma, "show_includes on Windows is not reliable with goma") + cflags += [ + "/clang:-H", + "/clang:-fshow-skipped-includes", + ] + } else { + cflags += [ + "-H", + "-fshow-skipped-includes", + ] + } } # This flag enforces that member pointer base types are complete. It helps # prevent us from running into problems in the Microsoft C++ ABI (see # https://crbug.com/847724). - # TODO(crbug/1052397): Remove is_chromeos_lacros once lacros-chrome switches - # to target_os="chromeos". - if (is_clang && !is_nacl && target_os != "chromeos" && !use_xcode_clang && - !is_chromeos_lacros && (is_win || use_custom_libcxx)) { + if (is_clang && !is_nacl && target_os != "chromeos" && + (is_win || use_custom_libcxx)) { cflags += [ "-fcomplete-member-pointers" ] } + # Use DWARF simple template names, with the following exceptions: + # + # * Windows is not supported as it doesn't use DWARF. + # * Apple platforms (e.g. MacOS, iPhone, iPad) aren't supported because xcode + # lldb doesn't have the needed changes yet. + # TODO(crbug.com/1379070): Remove if the upstream default ever changes. + if (is_clang && !is_nacl && !is_win && !is_apple) { + cflags_cc += [ "-gsimple-template-names" ] + } + + # MLGO specific flags. These flags enable an ML-based inliner trained on + # Chrome on Android (arm32) with ThinLTO enabled, optimizing for size. + # The "release" ML model is embedded into clang as part of its build. + # Currently, the ML inliner is only enabled when targeting Android due to: + # a) Android is where size matters the most.
+ # b) MLGO presently has the limitation of only being able to embed one model + # at a time; It is unclear if the embedded model is beneficial for + # non-Android targets. + # MLGO is only officially supported on linux. + if (use_ml_inliner && is_a_target_toolchain) { + assert( + is_android && host_os == "linux", + "MLGO is currently only supported for targeting Android on a linux host") + if (use_thin_lto) { + ldflags += [ "-Wl,-mllvm,-enable-ml-inliner=release" ] + } + } + # Pass the same C/C++ flags to the objective C/C++ compiler. cflags_objc += cflags_c cflags_objcc += cflags_cc @@ -779,6 +860,69 @@ config("compiler") { asmflags += cflags asmflags += cflags_c } + + # Rust compiler flags setup. + # --------------------------- + rustflags = [ + # Overflow checks are optional in Rust, but even if switched + # off they do not cause undefined behavior (the overflowing + # behavior is defined). Because containers are bounds-checked + # in safe Rust, they also can't provoke buffer overflows. + # As such these checks may be less important in Rust than C++. + # But in (simplistic) testing they have negligible performance + # overhead, and this helps to provide consistent behavior + # between different configurations, so we'll keep them on until + # we discover a reason to turn them off. + "-Coverflow-checks=on", + + # By default Rust passes `-nodefaultlibs` to the linker, however this + # conflicts with our `--unwind=none` flag for Android dylibs, as the latter + # is then unused and produces a warning/error. So this removes the + # `-nodefaultlibs` from the linker invocation from Rust, which would be used + # to compile dylibs on Android, such as for constructing unit test APKs. + "-Cdefault-linker-libraries", + + # Require `unsafe` blocks even in `unsafe` fns. This is intended to become + # an error by default eventually; see + # https://github.com/rust-lang/rust/issues/71668 + "-Dunsafe_op_in_unsafe_fn", + + # To make Rust .d files compatible with ninja + "-Zdep-info-omit-d-target", + + # If a macro panics during compilation, show which macro and where it is + # defined. + "-Zmacro-backtrace", + + # For deterministic builds, keep the local machine's current working + # directory from appearing in build outputs. + "-Zremap-cwd-prefix=.", + ] + if (rust_abi_target != "") { + rustflags += [ "--target=$rust_abi_target" ] + } + if (!use_thin_lto) { + # Don't include bitcode if it won't be used. + rustflags += [ "-Cembed-bitcode=no" ] + } + if (is_official_build) { + rustflags += [ "-Ccodegen-units=1" ] + } +} + +# Defers LTO optimization to the linker, for use when: +# * Having the C++ toolchain do the linking against Rust staticlibs, and it +# will be using LTO. +# * Having Rust toolchain invoke the linker, and you're linking Rust and C++ +# together, so this defers LTO to the linker. +# +# Otherwise, Rust does LTO during compilation. 
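+# As a usage sketch (the target below is hypothetical, not part of this
+# change), a Rust library that will be consumed by a C++ ThinLTO link could
+# opt in with:
+#
+#   rust_static_library("my_rust_lib") {
+#     configs += [ "//build/config/compiler:rust_defer_lto_to_linker" ]
+#   }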
+# +# https://doc.rust-lang.org/rustc/linker-plugin-lto.html +config("rust_defer_lto_to_linker") { + if (!is_debug && use_thin_lto && is_a_target_toolchain) { + rustflags = [ "-Clinker-plugin-lto" ] + } } # The BUILDCONFIG file sets this config on targets by default, which means when @@ -792,6 +936,8 @@ config("thinlto_optimize_default") { } else { ldflags = [ "-Wl,--lto-O" + lto_opt_level ] } + + rustflags = [ "-Clinker-plugin-lto=yes" ] } } @@ -816,6 +962,8 @@ config("thinlto_optimize_max") { } else { ldflags = [ "-Wl,--lto-O" + lto_opt_level ] } + + rustflags = [ "-Clinker-plugin-lto=yes" ] } } @@ -829,13 +977,23 @@ config("compiler_cpu_abi") { ldflags = [] defines = [] + configs = [] + if (is_chromeos) { + configs += [ "//build/config/chromeos:compiler_cpu_abi" ] + } + + # TODO(https://crbug.com/1383873): Remove this once figured out. + if (is_apple && current_cpu == "arm64") { + cflags += [ "-fno-global-isel" ] + ldflags += [ "-fno-global-isel" ] + } + if ((is_posix && !is_apple) || is_fuchsia) { # CPU architecture. We may or may not be doing a cross compile now, so for # simplicity we always explicitly set the architecture. if (current_cpu == "x64") { cflags += [ "-m64", - "-march=$x64_arch", "-msse3", ] ldflags += [ "-m64" ] @@ -849,7 +1007,8 @@ config("compiler_cpu_abi") { ] } } else if (current_cpu == "arm") { - if (is_clang && !is_android && !is_nacl) { + if (is_clang && !is_android && !is_nacl && + !(is_chromeos_lacros && is_chromeos_device)) { cflags += [ "--target=arm-linux-gnueabihf" ] ldflags += [ "--target=arm-linux-gnueabihf" ] } @@ -863,7 +1022,8 @@ config("compiler_cpu_abi") { cflags += [ "-mtune=$arm_tune" ] } } else if (current_cpu == "arm64") { - if (is_clang && !is_android && !is_nacl && !is_fuchsia) { + if (is_clang && !is_android && !is_nacl && !is_fuchsia && + !(is_chromeos_lacros && is_chromeos_device)) { cflags += [ "--target=aarch64-linux-gnu" ] ldflags += [ "--target=aarch64-linux-gnu" ] } @@ -1099,33 +1259,6 @@ config("compiler_cpu_abi") { ] ldflags += [ "-mips64r2" ] } - } else if (current_cpu == "pnacl" && is_nacl_nonsfi) { - if (target_cpu == "x86" || target_cpu == "x64") { - cflags += [ - "-arch", - "x86-32-nonsfi", - "--pnacl-bias=x86-32-nonsfi", - "--target=i686-unknown-nacl", - ] - ldflags += [ - "-arch", - "x86-32-nonsfi", - "--target=i686-unknown-nacl", - ] - } else if (target_cpu == "arm") { - cflags += [ - "-arch", - "arm-nonsfi", - "-mfloat-abi=hard", - "--pnacl-bias=arm-nonsfi", - "--target=armv7-unknown-nacl-gnueabihf", - ] - ldflags += [ - "-arch", - "arm-nonsfi", - "--target=armv7-unknown-nacl-gnueabihf", - ] - } } else if (current_cpu == "ppc64") { if (current_os == "aix") { cflags += [ "-maix64" ] @@ -1134,6 +1267,21 @@ config("compiler_cpu_abi") { cflags += [ "-m64" ] ldflags += [ "-m64" ] } + } else if (current_cpu == "riscv64") { + if (is_clang) { + cflags += [ "--target=riscv64-linux-gnu" ] + ldflags += [ "--target=riscv64-linux-gnu" ] + } + cflags += [ "-mabi=lp64d" ] + } else if (current_cpu == "loong64") { + if (is_clang) { + cflags += [ "--target=loongarch64-linux-gnu" ] + ldflags += [ "--target=loongarch64-linux-gnu" ] + } + cflags += [ + "-mabi=lp64d", + "-mcmodel=medium", + ] } else if (current_cpu == "s390x") { cflags += [ "-m64" ] ldflags += [ "-m64" ] @@ -1155,9 +1303,10 @@ config("compiler_codegen") { configs += [ "//build/config/nacl:compiler_codegen" ] } - if (current_cpu == "arm64" && is_android) { - # On arm64 disable outlining for Android. See crbug.com/931297 for more - # information. 
+ if (current_cpu == "arm64" && !is_win && is_clang) { + # Disable outlining everywhere on arm64 except Win. For more information see + # crbug.com/931297 for Android and crbug.com/1410297 for iOS. + # TODO(crbug.com/1411363): Enable this on Windows if possible. cflags += [ "-mno-outline" ] # This can be removed once https://bugs.llvm.org/show_bug.cgi?id=40348 @@ -1179,6 +1328,7 @@ config("compiler_codegen") { config("compiler_deterministic") { cflags = [] ldflags = [] + swiftflags = [] # Eliminate build metadata (__DATE__, __TIME__ and __TIMESTAMP__) for # deterministic build. See https://crbug.com/314403 @@ -1207,12 +1357,20 @@ # different build directory like "out/feature_a" and "out/feature_b" if # we build the same files with the same compile flags. # Other paths are already given in relative, no need to normalize them. - cflags += [ - "-Xclang", - "-fdebug-compilation-dir", - "-Xclang", - ".", - ] + if (is_nacl) { + # TODO(https://crbug.com/1231236): Use -ffile-compilation-dir= here. + cflags += [ + "-Xclang", + "-fdebug-compilation-dir", + "-Xclang", + ".", + ] + } else { + # -ffile-compilation-dir is an alias for both -fdebug-compilation-dir= + # and -fcoverage-compilation-dir=. + cflags += [ "-ffile-compilation-dir=." ] + swiftflags += [ "-file-compilation-dir=." ] + } if (!is_win) { # We don't use clang -cc1as on Windows (yet? https://crbug.com/762167) asmflags = [ "-Wa,-fdebug-compilation-dir,." ] @@ -1235,8 +1393,18 @@ # Tells the compiler not to use absolute paths when passing the default # paths to the tools it invokes. We don't want this because we don't # really need it and it can mess up the goma cache entries. - if (is_clang && !is_nacl) { + if (is_clang && (!is_nacl || is_nacl_saigo)) { cflags += [ "-no-canonical-prefixes" ] + + # Same for links: Let the compiler driver invoke the linker + # with a relative path and pass relative paths to built-in + # libraries. Not needed on Windows because we call the linker + # directly there, not through the compiler driver. + # We don't link on goma, so this change is just for cleaner + # internal linker invocations, for people who work on the build. + if (!is_win) { + ldflags += [ "-no-canonical-prefixes" ] + } } } @@ -1262,6 +1430,18 @@ config("clang_revision") { } } +config("rustc_revision") { + if (rustc_revision != "") { + # Similar to the above config, this is here so that all files get recompiled + # after a rustc roll. Nothing should ever read this cfg. This will not be + # set if a custom toolchain is used. + rustflags = [ + "--cfg", + "cr_rustc_revision=\"$rustc_revision\"", + ] + } +} + config("compiler_arm_fpu") { if (current_cpu == "arm" && !is_ios && !is_nacl) { cflags = [ "-mfpu=$arm_fpu" ] @@ -1280,7 +1460,7 @@ config("compiler_arm_thumb") { } config("compiler_arm") { - if (current_cpu == "arm" && (is_chromeos_ash || is_chromeos_lacros)) { + if (current_cpu == "arm" && is_chromeos) { # arm is normally the default mode for clang, but on chromeos a wrapper # is used to pass -mthumb, and therefore changes the default.
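    # Passing -marm below explicitly reselects ARM (A32) mode; targets that
    # want Thumb can use the compiler_arm_thumb config above instead.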
cflags = [ "-marm" ] @@ -1321,6 +1501,9 @@ config("runtime_library") { configs += [ "//build/config/win:runtime_library" ] } else if (is_linux || is_chromeos) { configs += [ "//build/config/linux:runtime_library" ] + if (is_chromeos) { + configs += [ "//build/config/chromeos:runtime_library" ] + } } else if (is_ios) { configs += [ "//build/config/ios:runtime_library" ] } else if (is_mac) { @@ -1350,160 +1533,25 @@ config("default_warnings") { cflags += [ "/WX" ] } if (fatal_linker_warnings) { + arflags = [ "/WX" ] ldflags = [ "/WX" ] } + defines = [ + # Without this, Windows headers warn that functions like wcsnicmp + # should be spelled _wcsnicmp. But all other platforms keep spelling + # it wcsnicmp, making this warning unhelpful. We don't want it. + "_CRT_NONSTDC_NO_WARNINGS", - cflags += [ - # Warnings permanently disabled: - - # C4091: 'typedef ': ignored on left of 'X' when no variable is - # declared. - # This happens in a number of Windows headers. Dumb. - "/wd4091", - - # C4127: conditional expression is constant - # This warning can in theory catch dead code and other problems, but - # triggers in far too many desirable cases where the conditional - # expression is either set by macros or corresponds some legitimate - # compile-time constant expression (due to constant template args, - # conditionals comparing the sizes of different types, etc.). Some of - # these can be worked around, but it's not worth it. - "/wd4127", - - # C4251: 'identifier' : class 'type' needs to have dll-interface to be - # used by clients of class 'type2' - # This is necessary for the shared library build. - "/wd4251", - - # C4275: non dll-interface class used as base for dll-interface class - # This points out a potential (but rare) problem with referencing static - # fields of a non-exported base, through the base's non-exported inline - # functions, or directly. The warning is subtle enough that people just - # suppressed it when they saw it, so it's not worth it. - "/wd4275", - - # C4312 is a VS 2015 64-bit warning for integer to larger pointer. - # TODO(brucedawson): fix warnings, crbug.com/554200 - "/wd4312", - - # C4324 warns when padding is added to fulfill alignas requirements, - # but can trigger in benign cases that are difficult to individually - # suppress. - "/wd4324", - - # C4351: new behavior: elements of array 'array' will be default - # initialized - # This is a silly "warning" that basically just alerts you that the - # compiler is going to actually follow the language spec like it's - # supposed to, instead of not following it like old buggy versions did. - # There's absolutely no reason to turn this on. - "/wd4351", - - # C4355: 'this': used in base member initializer list - # It's commonly useful to pass |this| to objects in a class' initializer - # list. While this warning can catch real bugs, most of the time the - # constructors in question don't attempt to call methods on the passed-in - # pointer (until later), and annotating every legit usage of this is - # simply more hassle than the warning is worth. - "/wd4355", - - # C4503: 'identifier': decorated name length exceeded, name was - # truncated - # This only means that some long error messages might have truncated - # identifiers in the presence of lots of templates. It has no effect on - # program correctness and there's no real reason to waste time trying to - # prevent it. - "/wd4503", - - # Warning C4589 says: "Constructor of abstract class ignores - # initializer for virtual base class." 
Disable this warning because it - # is flaky in VS 2015 RTM. It triggers on compiler generated - # copy-constructors in some cases. - "/wd4589", - - # C4611: interaction between 'function' and C++ object destruction is - # non-portable - # This warning is unavoidable when using e.g. setjmp/longjmp. MSDN - # suggests using exceptions instead of setjmp/longjmp for C++, but - # Chromium code compiles without exception support. We therefore have to - # use setjmp/longjmp for e.g. JPEG decode error handling, which means we - # have to turn off this warning (and be careful about how object - # destruction happens in such cases). - "/wd4611", - - # Warnings to evaluate and possibly fix/reenable later: - - "/wd4100", # Unreferenced formal function parameter. - "/wd4121", # Alignment of a member was sensitive to packing. - "/wd4244", # Conversion: possible loss of data. - "/wd4505", # Unreferenced local function has been removed. - "/wd4510", # Default constructor could not be generated. - "/wd4512", # Assignment operator could not be generated. - "/wd4610", # Class can never be instantiated, constructor required. - "/wd4838", # Narrowing conversion. Doesn't seem to be very useful. - "/wd4995", # 'X': name was marked as #pragma deprecated - "/wd4996", # Deprecated function warning. - - # These are variable shadowing warnings that are new in VS2015. We - # should work through these at some point -- they may be removed from - # the RTM release in the /W4 set. - "/wd4456", - "/wd4457", - "/wd4458", - "/wd4459", - - # All of our compilers support the extensions below. - "/wd4200", # nonstandard extension used: zero-sized array in struct/union - "/wd4201", # nonstandard extension used: nameless struct/union - "/wd4204", # nonstandard extension used : non-constant aggregate - # initializer - - "/wd4221", # nonstandard extension used : 'identifier' : cannot be - # initialized using address of automatic variable - - # http://crbug.com/588506 - Conversion suppressions waiting on Clang - # -Wconversion. - "/wd4245", # 'conversion' : conversion from 'type1' to 'type2', - # signed/unsigned mismatch - - "/wd4267", # 'var' : conversion from 'size_t' to 'type', possible loss of - # data - - "/wd4305", # 'identifier' : truncation from 'type1' to 'type2' - "/wd4389", # 'operator' : signed/unsigned mismatch - - "/wd4702", # unreachable code - - # http://crbug.com/848979 - MSVC is more conservative than Clang with - # regards to variables initialized and consumed in different branches. - "/wd4701", # Potentially uninitialized local variable 'name' used - "/wd4703", # Potentially uninitialized local pointer variable 'name' used - - # http://crbug.com/848979 - Remaining Clang permitted warnings. - "/wd4661", # 'identifier' : no suitable definition provided for explicit - # template instantiation request - - "/wd4706", # assignment within conditional expression - # MSVC is stricter and requires a boolean expression. - - "/wd4715", # 'function' : not all control paths return a value' - # MSVC does not analyze switch (enum) for completeness. - ] - - cflags_cc += [ - # Allow "noexcept" annotations even though we compile with exceptions - # disabled. - "/wd4577", + # TODO(thakis): winsock wants us to use getaddrinfo instead of + # gethostbyname. Fires mostly in non-Chromium code. We probably + # want to remove this define eventually. + "_WINSOCK_DEPRECATED_NO_WARNINGS", ] - - if (current_cpu == "x86") { - cflags += [ - # VC++ 2015 changes 32-bit size_t truncation warnings from 4244 to - # 4267. 
Example: short TruncTest(size_t x) { return x; } - # Since we disable 4244 we need to disable 4267 during migration. - # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. - "/wd4267", - ] + if (!is_clang) { + # TODO(thakis): Remove this once + # https://swiftshader-review.googlesource.com/c/SwiftShader/+/57968 has + # rolled into angle. + cflags += [ "/wd4244" ] } } else { if (is_apple && !is_nacl) { @@ -1520,7 +1568,7 @@ config("default_warnings") { # Suppress warnings about ABI changes on ARM (Clang doesn't give this # warning). - if (current_cpu == "arm" && !is_clang) { + if (!is_starboard && current_cpu == "arm" && !is_clang) { cflags += [ "-Wno-psabi" ] } @@ -1564,86 +1612,113 @@ config("default_warnings") { "-Wno-missing-field-initializers", # "struct foo f = {0};" "-Wno-unused-parameter", # Unused function parameters. ] + + if (!is_starboard && (!is_nacl || is_nacl_saigo)) { + cflags += [ + # An ABI compat warning we don't care about, https://crbug.com/1102157 + # TODO(thakis): Push this to the (few) targets that need it, + # instead of having a global flag. + "-Wno-psabi", + ] + } } if (is_clang) { cflags += [ - # TODO(thakis): Consider -Wloop-analysis (turns on - # -Wrange-loop-analysis too). - - # This warns on using ints as initializers for floats in - # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|), - # which happens in several places in chrome code. Not sure if - # this is worth fixing. - "-Wno-c++11-narrowing", + "-Wloop-analysis", # TODO(thakis): This used to be implied by -Wno-unused-function, # which we no longer use. Check if it makes sense to remove # this as well. http://crbug.com/316352 "-Wno-unneeded-internal-declaration", + + "-Wno-extra-semi", + "-Wno-pessimizing-move", + "-Wno-shadow", ] - # use_xcode_clang only refers to the iOS toolchain, host binaries use - # chromium's clang always. - if (!is_nacl) { + if (use_cobalt_customizations) { cflags += [ - # TODO(thakis): https://crbug.com/604888 - "-Wno-undefined-var-template", + "-Wno-range-loop-bind-reference", + "-Wno-range-loop-construct", ] + } + if (!is_nacl || is_nacl_saigo) { if (is_win) { # TODO(thakis): https://crbug.com/617318 # Currently goma can not handle case sensitiveness for windows well. cflags += [ "-Wno-nonportable-include-path" ] } - if ((current_toolchain == host_toolchain || !use_xcode_clang) && - !using_old_compiler) { - # Flags NaCl (Clang 3.7) and Xcode 9.2 (Clang clang-900.0.39.2) do not - # recognize. + if (!use_cobalt_customizations) { cflags += [ + "-Wenum-compare-conditional", + # Ignore warnings about MSVC optimization pragmas. # TODO(thakis): Only for no_chromium_code? http://crbug.com/912662 "-Wno-ignored-pragma-optimize", ] - if (!use_cobalt_customizations) { - cflags += [ - # An ABI compat warning we don't care about, https://crbug.com/1102157 - # TODO(thakis): Push this to the (few) targets that need it, - # instead of having a global flag. - "-Wno-psabi", - - # TODO(https://crbug.com/989932): Evaluate and possibly enable. - "-Wno-implicit-int-float-conversion", - - # TODO(https://crbug.com/999886): Clean up, enable. - "-Wno-final-dtor-non-final-class", + } - # TODO(https://crbug.com/1016945) Clean up, enable. - "-Wno-builtin-assume-aligned-alignment", + if (!use_cobalt_customizations && !is_nacl) { + cflags += [ + # TODO(crbug.com/1343975) Evaluate and possibly enable. + "-Wno-deprecated-builtins", - # TODO(https://crbug.com/1028110): Evaluate and possible enable. - "-Wno-deprecated-copy", + # TODO(crbug.com/1352183) Evaluate and possibly enable. 
+ "-Wno-bitfield-constant-conversion", - # TODO(https://crbug.com/1050281): Clean up, enable. - "-Wno-non-c-typedef-for-linkage", + # TODO(crbug.com/1412713) Evaluate and possibly enable. + "-Wno-deprecated-this-capture", ] - } + } + } + } +} - cflags_c += [ - # TODO(https://crbug.com/995993): Clean up and enable. - "-Wno-implicit-fallthrough", - ] +# prevent_unsafe_narrowing ---------------------------------------------------- +# +# Warnings that prevent narrowing or comparisons of integer types that are +# likely to cause out-of-bounds reads/writes or Undefined Behaviour. In +# particular, size_t is used for memory sizes, allocation, indexing, and +# offsets. Using other integer types along with size_t produces risk of +# memory-safety bugs and thus security exploits. +# +# In order to prevent these bugs, allocation sizes were historically limited to +# sizes that can be represented within 31 bits of information, allowing `int` to +# be safely misused instead of `size_t` (https://crbug.com/169327). In order to +# support increasing the allocation limit we require strict adherence to +# using the correct types, avoiding lossy conversions, and preventing overflow. +# To do so, enable this config and fix errors by converting types to be +# `size_t`, which is both large enough and unsigned, when dealing with memory +# sizes, allocations, indices, or offsets. In cases where type conversion is not +# possible or is superfluous, use base::strict_cast<> or base::checked_cast<> +# to convert to size_t as needed. +# See also: https://docs.google.com/document/d/1CTbQ-5cQjnjU8aCOtLiA7G6P0i5C6HpSDNlSNq6nl5E +# +# To enable in a GN target, use: +# configs += [ "//build/config/compiler:prevent_unsafe_narrowing" ] - if (!use_cobalt_customizations) { - if (enable_wmax_tokens) { - cflags += [ "-Wmax-tokens" ] - } else { - # TODO(https://crbug.com/1049569): Remove after Clang 87b235db. - cflags += [ "-Wno-max-tokens" ] - } - } - } +config("prevent_unsafe_narrowing") { + cflags = [] + if (is_clang) { + cflags += [ + "-Wimplicit-int-conversion", + "-Wsign-compare", + "-Wsign-conversion", + ] + if (!is_starboard) { + cflags += [ + "-Wshorten-64-to-32", + ] + } + if (!is_nacl) { + cflags += [ + # Avoid bugs of the form `if (size_t i = size; i >= 0; --i)` while + # fixing types to be sign-correct. + "-Wtautological-unsigned-zero-compare", + ] } } }
defines = [ @@ -1697,61 +1781,83 @@ config("chromium_code") { if (!is_debug && !using_sanitizer && current_cpu != "s390x" && current_cpu != "s390" && current_cpu != "ppc64" && - current_cpu != "mips" && current_cpu != "mips64") { + current_cpu != "mips" && current_cpu != "mips64" && + current_cpu != "riscv64" && current_cpu != "loong64") { # Non-chromium code is not guaranteed to compile cleanly with # _FORTIFY_SOURCE. Also, fortified build may fail when optimizations are # disabled, so only do that for Release build. defines += [ "_FORTIFY_SOURCE=2" ] } - if (is_mac) { - cflags_objc = [ "-Wobjc-missing-property-synthesis" ] - cflags_objcc = [ "-Wobjc-missing-property-synthesis" ] + if (is_apple) { + cflags_objc = [ "-Wimplicit-retain-self" ] + cflags_objcc = [ "-Wimplicit-retain-self" ] } - if (is_ios) { - cflags_objc = [ "-Wimplicit-retain-self" ] - cflags_objcc = cflags_objc + if (is_mac) { + cflags_objc += [ "-Wobjc-missing-property-synthesis" ] + cflags_objcc += [ "-Wobjc-missing-property-synthesis" ] } } if (is_clang) { cflags += [ # Warn on missing break statements at the end of switch cases. - # For intentional fallthrough, use FALLTHROUGH; from - # base/compiler_specific.h + # For intentional fallthrough, use [[fallthrough]]. "-Wimplicit-fallthrough", + ] + if (!is_starboard) { + cflags += [ + # Warn on unnecessary extra semicolons outside of function definitions. + "-Wextra-semi", + ] + } + + # Suppress warning in old //net. + if (is_starboard) { + cflags += [ + "-Wno-reorder-ctor", + "-Wno-unused-const-variable", + "-Wno-unused-variable", + "-Wno-unused-private-field", + "-Wno-missing-braces", + "-Wno-string-concatenation", + ] + } + + # Suppress warnings in old //base and //net. + if (is_starboard) { + cflags += [ + "-Wno-sign-compare", + "-Wno-shorten-64-to-32", + ] + } + # TODO(thakis): Enable this more often, https://crbug.com/346399 - # use_libfuzzer: https://crbug.com/1063180 - if (!is_starboard && !is_nacl && !use_libfuzzer) { - cflags += [ "-Wunreachable-code" ] + # use_fuzzing_engine_with_lpm: https://crbug.com/1063180 + if (!is_starboard && (!is_nacl || is_nacl_saigo) && !use_fuzzing_engine_with_lpm) { + cflags += [ "-Wunreachable-code-aggressive" ] } # Thread safety analysis is broken under nacl: https://crbug.com/982423. - if (!is_nacl) { + if (!is_nacl || is_nacl_saigo) { cflags += [ # Thread safety analysis. See base/thread_annotations.h and # https://clang.llvm.org/docs/ThreadSafetyAnalysis.html "-Wthread-safety", ] } - - # TODO(thakis): Enable this for more platforms, https://crbug.com/926235 - # ChromeOS: http://crbug.com/940863 - # Chromecast: http://crbug.com/942554 - has_dchecks = is_debug || dcheck_always_on - if (!has_dchecks && is_chromeos_ash && is_chrome_branded) { - # Temporarily disable -Wextra-semi for Chrome on Chrome OS. - } else if (is_chromecast && chromecast_branding != "public") { - # Temporarily disable -Wextra-semi for Chromecast. - } else if (!is_starboard) { - cflags += [ "-Wextra-semi" ] - } } configs = [ ":default_warnings" ] + + if (!is_starboard) { + configs += [ + ":noshadowing", + ] + } } config("no_chromium_code") { @@ -1760,19 +1866,12 @@ config("no_chromium_code") { defines = [] if (is_win) { + if (!is_starboard && is_clang) { + cflags += [ "/W3" ] # Warning level 3. + } cflags += [ - "/W3", # Warning level 3. "/wd4800", # Disable warning when forcing value to bool. "/wd4267", # TODO(jschuh): size_t to int. - "/wd4996", # Deprecated function warning. - ] - if (is_starboard) { - # The platform should set warning flags. 
- cflags -= [ "/W3" ] - } - defines += [ - "_CRT_NONSTDC_NO_WARNINGS", - "_CRT_NONSTDC_NO_DEPRECATE", ] } else { # GCC may emit unsuppressible warnings so don't add -Werror for no chromium @@ -1793,16 +1892,27 @@ config("no_chromium_code") { # Lots of third-party libraries have unused variables. Instead of # suppressing them individually, we just blanket suppress them here. "-Wno-unused-variable", + + # Similarly, we're not going to fix all the C++11 narrowing issues in + # third-party libraries. + "-Wno-c++11-narrowing", ] if (!use_cobalt_customizations && !is_nacl && (current_toolchain == host_toolchain || !use_xcode_clang)) { cflags += [ + # Disabled for similar reasons as -Wunused-variable. + "-Wno-unused-but-set-variable", + # TODO(https://crbug.com/1202159): Clean up and enable. "-Wno-misleading-indentation", ] } } + # Suppress all warnings in third party, as Cargo does: + # https://doc.rust-lang.org/rustc/lints/levels.html#capping-lints + rustflags = [ "--cap-lints=allow" ] + configs = [ ":default_warnings" ] } @@ -1813,7 +1923,7 @@ config("no_chromium_code") { config("noshadowing") { # This flag has to be disabled for nacl because the nacl compiler is too # strict about shadowing. - if (is_clang && !is_nacl) { + if (is_clang && (!is_nacl || is_nacl_saigo)) { cflags = [ "-Wshadow" ] } } @@ -1873,7 +1983,9 @@ config("export_dynamic") { config("thin_archive") { # The macOS and iOS default linker ld64 does not support reading thin # archives. - if ((is_posix && !is_nacl && (!is_apple || use_lld)) || is_fuchsia) { + # TODO(crbug.com/1221615): Enable on is_apple if use_lld once that no longer + # confuses lldb. + if ((is_posix && !is_nacl && !is_apple) || is_fuchsia) { arflags = [ "-T" ] } else if (is_win && use_lld) { arflags = [ "/llvmlibthin" ] @@ -2065,7 +2177,7 @@ if (is_win) { "-Wl,-no_function_starts", ] } - } else if (current_os != "aix") { + } else if (current_os != "aix" && current_os != "zos") { # Non-Mac Posix flags. # Aix does not support these. @@ -2075,6 +2187,11 @@ if (is_win) { "-fdata-sections", "-ffunction-sections", ] + if ((!is_nacl || is_nacl_saigo) && is_clang) { + # We don't care about unique section names, this makes object files a bit + # smaller. + common_optimize_on_cflags += [ "-fno-unique-section-names" ] + } common_optimize_on_ldflags += [ # Specifically tell the linker to perform optimizations. @@ -2087,7 +2204,7 @@ if (is_win) { } config("default_stack_frames") { - if (is_posix || is_fuchsia) { + if (!is_win) { if (enable_frame_pointers) { cflags = [ "-fno-omit-frame-pointer" ] @@ -2119,21 +2236,36 @@ config("optimize") { # Favor size over speed, /O1 must be before the common flags. # /O1 implies /Os and /GF. cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ] + rustflags = [ "-Copt-level=s" ] } else { # PGO requires all translation units to be compiled with /O2. The actual # optimization level will be decided based on the profiling data. cflags = [ "/O2" ] + common_optimize_on_cflags + [ "/Oi" ] + + # https://doc.rust-lang.org/rustc/profile-guided-optimization.html#usage + # suggests not using an explicit `-Copt-level` at all, and the default is + # to optimize for performance like `/O2` for clang. + rustflags = [] } - } else if (optimize_for_size && !is_nacl) { + } else if (optimize_for_size) { # Favor size over speed. - # TODO(crbug.com/718650): Fix -Os in PNaCl compiler and remove the is_nacl - # guard above. 
if (is_clang) { cflags = [ "-Oz" ] + common_optimize_on_cflags + + if (use_ml_inliner && is_a_target_toolchain) { + cflags += [ + "-mllvm", + "-enable-ml-inliner=release", + ] + } } else { cflags = [ "-Os" ] + common_optimize_on_cflags } - } else if (is_chromeos_ash) { + + # Like with `-Oz` on Clang, `-Copt-level=z` will also turn off loop + # vectorization. + rustflags = [ "-Copt-level=z" ] + } else if (is_chromeos) { # TODO(gbiv): This is partially favoring size over speed. CrOS exclusively # uses clang, and -Os in clang is more of a size-conscious -O2 than "size at # any cost" (AKA -Oz). It'd be nice to: @@ -2141,8 +2273,17 @@ config("optimize") { # for size by default (so, also Windows) # - Investigate -Oz here, maybe just for ARM? cflags = [ "-Os" ] + common_optimize_on_cflags + + # Similar to clang, we optimize with `-Copt-level=s` to keep loop + # vectorization while otherwise optimizing for size. + rustflags = [ "-Copt-level=s" ] } else { cflags = [ "-O2" ] + common_optimize_on_cflags + + # The `-O3` for clang turns on extra optimizations compared to the standard + # `-O2`. But for rust, `-Copt-level=3` is the default and is thus reliable + # to use. + rustflags = [ "-Copt-level=3" ] } ldflags = common_optimize_on_ldflags } @@ -2210,6 +2351,7 @@ config("optimize_max") { } else { cflags = [ "-O2" ] + common_optimize_on_cflags } + rustflags = [ "-Copt-level=3" ] } } @@ -2242,11 +2384,13 @@ config("optimize_speed") { } else { cflags = [ "-O3" ] + common_optimize_on_cflags } + rustflags = [ "-Copt-level=3" ] } } config("optimize_fuzzing") { cflags = [ "-O1" ] + common_optimize_on_cflags + rustflags = [ "-Copt-level=1" ] ldflags = common_optimize_on_ldflags visibility = [ ":default_optimization" ] } @@ -2281,14 +2425,17 @@ if (is_clang && is_a_target_toolchain) { } else if (clang_use_default_sample_profile) { assert(build_with_chromium, "Our default profiles currently only apply to Chromium") - assert(is_android || is_chromeos_lacros || is_chromeos_ash || is_chromecast, + assert(is_android || is_chromeos || is_castos, "The current platform has no default profile") - if (is_android || is_chromecast) { + if (is_android || is_castos) { _clang_sample_profile = "//chrome/android/profiles/afdo.prof" } else { - assert(chromeos_afdo_platform == "atom" || - chromeos_afdo_platform == "bigcore", - "Only atom and bigcore are valid Chrome OS profiles.") + assert( + chromeos_afdo_platform == "atom" || + chromeos_afdo_platform == "bigcore" || + chromeos_afdo_platform == "arm" || + chromeos_afdo_platform == "arm-exp", + "Only 'atom', 'bigcore', 'arm' and 'arm-exp' are valid ChromeOS profiles.") _clang_sample_profile = "//chromeos/profiles/${chromeos_afdo_platform}.afdo.prof" } @@ -2306,8 +2453,7 @@ config("afdo_optimize_size") { } # GCC and clang support a form of profile-guided optimization called AFDO. -# There are some targeted places that AFDO regresses (and an icky interaction -# between //base/allocator:tcmalloc and AFDO on GCC), so we provide a separate +# There are some targeted places that AFDO regresses, so we provide a separate # config to allow AFDO to be disabled per-target. 
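# As a sketch (the target name is hypothetical, not part of this change),
# such a place can opt out with:
#
#   configs -= [ "//build/config/compiler:afdo" ]
#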
config("afdo") { if (is_clang) { @@ -2324,6 +2470,9 @@ config("afdo") { rebased_clang_sample_profile = rebase_path(_clang_sample_profile, root_build_dir) cflags += [ "-fprofile-sample-use=${rebased_clang_sample_profile}" ] + if (use_profi) { + cflags += [ "-fsample-profile-use-profi" ] + } inputs = [ _clang_sample_profile ] } } else if (auto_profile_path != "" && is_a_target_toolchain) { @@ -2365,9 +2514,17 @@ config("win_pdbaltpath") { # Full symbols. config("symbols") { + rustflags = [] if (is_win) { if (is_clang) { - cflags = [ "/Z7" ] # Debug information in the .obj files. + cflags = [ + # Debug information in the .obj files. + "/Z7", + + # Disable putting the compiler command line into the debug info to + # prevent some types of non-determinism. + "-gno-codeview-command-line", + ] } else { cflags = [ "/Zi" ] # Produce PDB file, no edit and continue. } @@ -2381,9 +2538,6 @@ config("symbols") { # All configs using /DEBUG should include this: configs = [ ":win_pdbaltpath" ] - - # TODO(crbug.com/1138553): Re-enable constructor homing on windows after - # libc++ fix is in. } else { cflags = [] if (is_mac && enable_dsyms) { @@ -2393,33 +2547,41 @@ config("symbols") { # version 7 also produces debug data that is incompatible with Breakpad # dump_syms, so this is still required (https://crbug.com/622406). cflags += [ "-fno-standalone-debug" ] - } else if (is_mac && !use_dwarf5) { - # clang defaults to DWARF2 on macOS unless mac_deployment_target is - # at least 10.11. - # TODO(thakis): Remove this once mac_deployment_target is 10.11. - cflags += [ "-gdwarf-4" ] } - if (use_dwarf5 && !is_nacl) { - cflags += [ "-gdwarf-5" ] + # On aix -gdwarf causes linker failures due to thread_local variables. + if (!is_nacl && current_os != "aix") { + if (use_dwarf5) { + cflags += [ "-gdwarf-5" ] + rustflags += [ "-Zdwarf-version=5" ] + } else if (!is_apple) { + # Recent clang versions default to DWARF5 on Linux, and Android is about + # to switch. TODO: Adopt that in a controlled way. + # Apple platforms still default to 4, so the flag is not needed there. + cflags += [ "-gdwarf-4" ] + rustflags += [ "-Zdwarf-version=4" ] + } } # The gcc-based nacl compilers don't support -fdebug-compilation-dir (see # elsewhere in this file), so they can't have build-dir-independent output. + # Moreover pnacl does not support newer flags such as -fdebug-prefix-map # Disable symbols for nacl object files to get deterministic, - # build-directory-independent output. pnacl and nacl-clang do support that - # flag, so we can use use -g1 for pnacl and nacl-clang compiles. - # gcc nacl is is_nacl && !is_clang, pnacl and nacl-clang are && is_clang. - if (!is_nacl || is_clang) { + # build-directory-independent output. + # Keeping -g2 for saigo as it's the only toolchain whose artifacts are + # part of the Chromium release (other nacl toolchains are used only for tests). + if ((!is_nacl || is_nacl_saigo) && current_os != "zos") { cflags += [ "-g2" ] } - # TODO(https://crbug.com/1050118): Investigate missing debug info on mac. - if (is_clang && !is_nacl && !use_xcode_clang && !is_apple) { - cflags += [ - "-Xclang", - "-debug-info-kind=constructor", - ] + if (!is_nacl && is_clang && !is_tsan && !is_asan) { + # gcc generates dwarf-aranges by default on -g1 and -g2. On clang it has + # to be manually enabled. + # + # It is skipped in tsan and asan because enabling it causes some + # formatting changes in the output which would require fixing bunches + # of expectation regexps.
+ cflags += [ "-gdwarf-aranges" ] } if (is_apple) { @@ -2443,8 +2605,14 @@ config("symbols") { # obj/native_client/src/trusted/service_runtime/sel_asm/nacl_switch_32.o: # DWARF info may be corrupt; offsets in a range list entry are in different # sections" there. Maybe just a bug in nacl_switch_32.S. - if (!is_apple && !is_nacl && current_cpu != "x86" && - (use_gold || use_lld)) { + _enable_gdb_index = + symbol_level == 2 && !is_apple && !is_nacl && current_cpu != "x86" && + current_os != "zos" && (use_gold || use_lld) && + # Disable on non-fission 32-bit Android because it pushes + # libcomponents_unittests over the 4gb size limit. + !(is_android && !use_debug_fission && current_cpu != "x64" && + current_cpu != "arm64") + if (_enable_gdb_index) { if (is_clang) { # This flag enables the GNU-format pubnames and pubtypes sections, # which lld needs in order to generate a correct GDB index. @@ -2455,6 +2623,34 @@ config("symbols") { ldflags += [ "-Wl,--gdb-index" ] } } + + configs = [] + + # Compress debug on 32-bit ARM to stay under 4GB for ChromeOS + # https://b/243982712. + if (symbol_level == 2 && is_chromeos_device && !use_debug_fission && + !is_nacl && current_cpu == "arm") { + configs += [ "//build/config:compress_debug_sections" ] + } + + if (is_clang && (!is_nacl || is_nacl_saigo) && current_os != "zos") { + if (is_apple) { + # TODO(https://crbug.com/1050118): Investigate missing debug info on mac. + # Make sure we don't use constructor homing on mac. + cflags += [ + "-Xclang", + "-debug-info-kind=limited", + ] + } else { + # Use constructor homing for debug info. This option reduces debug info + # by emitting class type info only when constructors are emitted. + cflags += [ + "-Xclang", + "-fuse-ctor-homing", + ] + } + } + rustflags += [ "-g" ] } # Minimal symbols. @@ -2488,6 +2684,11 @@ config("minimal_symbols") { # at least 10.11. # TODO(thakis): Remove this once mac_deployment_target is 10.11. cflags += [ "-gdwarf-4" ] + } else if (!use_dwarf5 && !is_nacl && current_os != "aix") { + # On aix -gdwarf causes linker failures due to thread_local variables. + # Recent clang versions default to DWARF5 on Linux, and Android is about + # to switch. TODO: Adopt that in a controlled way. + cflags += [ "-gdwarf-4" ] } if (use_dwarf5 && !is_nacl) { @@ -2496,30 +2697,34 @@ config("minimal_symbols") { # The gcc-based nacl compilers don't support -fdebug-compilation-dir (see # elsewhere in this file), so they can't have build-dir-independent output. + # Moreover pnacl does not support newer flags such as -fdebug-prefix-map # Disable symbols for nacl object files to get deterministic, - # build-directory-independent output. pnacl and nacl-clang do support that - # flag, so we can use use -g1 for pnacl and nacl-clang compiles. - # gcc nacl is is_nacl && !is_clang, pnacl and nacl-clang are && is_clang. - if (!is_nacl || is_clang) { + # build-directory-independent output. + # Keeping -g1 for saigo as it's the only toolchain whose artifacts are + # part of the Chromium release (other nacl toolchains are used only for tests). + if (!is_nacl || is_nacl_saigo) { cflags += [ "-g1" ] } + + if (!is_nacl && is_clang && !is_tsan && !is_asan) { + # See comment for -gdwarf-aranges in config("symbols"). + cflags += [ "-gdwarf-aranges" ] + } + ldflags = [] if (is_android && is_clang) { - # Android defaults to symbol_level=1 builds in production builds - # (https://crbug.com/648948), but clang, unlike gcc, doesn't emit - # DW_AT_linkage_name in -g1 builds.
-fdebug-info-for-profiling enables - # that (and a bunch of other things we don't need), so that we get - # qualified names in stacks. + # Android defaults to symbol_level=1 builds, but clang, unlike gcc, + # doesn't emit DW_AT_linkage_name in -g1 builds. + # -fdebug-info-for-profiling enables that (and a bunch of other things we + # don't need), so that we get qualified names in stacks. # TODO(thakis): Consider making clang emit DW_AT_linkage_name in -g1 mode; # failing that consider doing this on non-Android too. cflags += [ "-fdebug-info-for-profiling" ] } - # Note: debug_fission is no-op with symbol_level=1 since all -g1 debug_info - # will stay in the executable. - asmflags = cflags } + rustflags = [ "-Cdebuginfo=1" ] } # This configuration contains function names only. That is, the compiler is @@ -2587,9 +2792,12 @@ if (is_chromeos_ash && is_chromeos_device) { if (is_android || (is_chromeos_ash && is_chromeos_device)) { # Use orderfile for linking Chrome on Android and Chrome OS. # This config enables using an orderfile for linking in LLD. - # TODO: Consider using call graph sort instead, at least on Android. config("chrome_orderfile_config") { - if (chrome_orderfile_path != "" && !enable_call_graph_profile_sort) { + # Don't try to use an orderfile with call graph sorting, except on Android, + # where we care about memory used by code, so we still want to mandate + # ordering. + if (chrome_orderfile_path != "" && + (is_android || !enable_call_graph_profile_sort)) { assert(use_lld) _rebased_orderfile = rebase_path(chrome_orderfile_path, root_build_dir) ldflags = [ @@ -2606,7 +2814,11 @@ if (is_android || (is_chromeos_ash && is_chromeos_device)) { config("default_init_stack_vars") { cflags = [] if (init_stack_vars && is_clang && !is_nacl && !using_sanitizer) { - cflags += [ "-ftrivial-auto-var-init=pattern" ] + if (init_stack_vars_zero) { + cflags += [ "-ftrivial-auto-var-init=zero" ] + } else { + cflags += [ "-ftrivial-auto-var-init=pattern" ] + } } } diff --git a/build/config/compiler/compiler.gni b/build/config/compiler/compiler.gni index 572d7a8a45dc..aa5f37fd0b06 100644 --- a/build/config/compiler/compiler.gni +++ b/build/config/compiler/compiler.gni @@ -1,12 +1,14 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +import("//build/config/c++/c++.gni") import("//build/config/chrome_build.gni") import("//build/config/chromecast_build.gni") import("//build/config/chromeos/args.gni") import("//build/config/chromeos/ui_mode.gni") import("//build/config/compiler/pgo/pgo.gni") +import("//build/config/cronet/config.gni") import("//build/config/sanitizers/sanitizers.gni") import("//build/toolchain/cc_wrapper.gni") import("//build/toolchain/goma.gni") @@ -24,6 +26,27 @@ if (is_apple) { import("//build/config/apple/symbols.gni") } +if (is_ios) { + import("//build/config/ios/config.gni") +} + +declare_args() { + # Set to true to use lld, the LLVM linker. + # In late bring-up on macOS (see docs/mac_lld.md). + # Tentatively used on iOS. + # The default linker everywhere else. + use_lld = is_clang && current_os != "zos" + + if (use_cobalt_customizations && is_apple) { + use_lld = false + } + + # If true, optimize for size. + # Default to favoring speed over size for platforms not listed below. 
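+  # For a local experiment this default can be overridden in args.gn, e.g.
+  # (illustrative only, not a supported production configuration):
+  #
+  #   optimize_for_size = true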
+ optimize_for_size = + !is_high_end_android && (is_android || is_ios || is_castos) +} + declare_args() { # Default to warnings as errors for default workflow, where we catch # warnings with known toolchains. Allow overriding this e.g. for Chromium @@ -67,11 +90,15 @@ declare_args() { # Use it by default on official-optimized android and Chrome OS builds, but # not ARC or linux-chromeos since it's been seen to not play nicely with # Chrome's clang. crbug.com/1033839 + # Disabled in iOS cronet builds since build step cronet_static_complete + # wants to build a .a file consumable by external clients, and they won't + # have the same LLVM revisions as us, making bitcode useless to them. use_thin_lto = - is_cfi || - (is_official_build && chrome_pgo_phase != 1 && - (is_linux || is_win || (is_android && target_os != "chromeos") || - ((is_chromeos_ash || is_chromeos_lacros) && is_chromeos_device))) + is_cfi || (is_clang && is_official_build && chrome_pgo_phase != 1 && + (is_linux || is_win || is_mac || + (is_ios && use_lld && !is_cronet_build) || + (is_android && target_os != "chromeos") || + (is_chromeos && is_chromeos_device))) # If true, use Goma for ThinLTO code generation where applicable. use_goma_thin_lto = false @@ -95,7 +122,11 @@ declare_args() { # For unofficial (e.g. development) builds and non-Chrome branded (e.g. Cronet # which doesn't use Crashpad, crbug.com/479283) builds it's useful to be able # to unwind at runtime. - exclude_unwind_tables = is_official_build + # Include the unwind tables on Android even for official builds, as otherwise + # the crash dumps generated by Android's debuggerd are largely useless, and + # having this additional mechanism to understand issues is particularly helpful + # to WebView. + exclude_unwind_tables = is_official_build && !is_android # Where to redirect clang crash diagnoses clang_diagnostic_dir = @@ -107,38 +138,57 @@ declare_args() { # performed as mitigation against Return-oriented programming (ROP). # https://chromium.googlesource.com/chromium/src/+/main/docs/design/sandbox.md#cet-shadow-stack enable_cet_shadow_stack = target_cpu == "x64" -} -assert(!is_cfi || use_thin_lto, "CFI requires ThinLTO") + # Set to true to enable using the ML inliner in LLVM. This currently only + # enables the ML inliner when targeting Android. + # Currently the ML inliner is only supported on linux hosts + use_ml_inliner = host_os == "linux" && is_android -# If true, optimize for size. Does not affect windows builds. -# Linux & Mac favor speed over size. -# TODO(brettw) it's weird that Mac and desktop Linux are different. We should -# explore favoring size over speed in this case as well. -optimize_for_size = is_android || is_chromecast || is_fuchsia || is_ios + # Set to true to use the android unwinder V2 implementation. + use_android_unwinder_v2 = true -declare_args() { # Whether we should consider the profile we're using to be accurate. Accurate # profiles have the benefit of (potentially substantial) binary size # reductions, by instructing the compiler to optimize cold and uncovered # functions heavily for size. This often comes at the cost of performance. sample_profile_is_accurate = optimize_for_size + + # Use offsets rather than pointers in vtables in order to reduce the number of + # relocations. This is safe to enable only when all C++ code is built with the + # flag set to the same value. 
+ use_relative_vtables_abi = is_android && current_cpu == "arm64" && + use_custom_libcxx && !is_component_build +} + +# To try out this combination, delete this assert. +assert( + !use_relative_vtables_abi || !is_cfi, + "is_cfi=true is known to conflict with use_relative_vtables_abi=true.\n" + + "See https://bugs.chromium.org/p/chromium/issues/detail?id=1375035#c53") + +assert(!is_cfi || use_thin_lto, "CFI requires ThinLTO") +assert(!enable_profiling || !is_component_build, + "Cannot profile component builds (crbug.com/1199271).") + +if (use_thin_lto && is_debug) { + print("WARNING: ThinLTO (use_thin_lto=true) doesn't work with debug" + + " (is_debug=true) build.") } # Determine whether to enable or disable frame pointers, based on the platform # and build arguments. -# TODO(crbug.com/1052397): Consider changing is_chromeos_ash to is_chromeos after -# lacros-chrome switches to target_os="chromeos". -if (is_chromeos_ash || is_chromeos_lacros) { +if (is_chromeos) { # ChromeOS generally prefers frame pointers, to support CWP. # However, Clang does not currently generate usable frame pointers in ARM # 32-bit builds (https://bugs.llvm.org/show_bug.cgi?id=18505) so disable them # there to avoid the unnecessary overhead. enable_frame_pointers = current_cpu != "arm" -} else if (is_apple || is_linux || is_chromeos) { +} else if (is_apple || is_linux) { enable_frame_pointers = true } else if (is_win) { # 64-bit Windows ABI doesn't support frame pointers. + # NOTE: This setting is actually not used in the BUILD.gn for Windows, + # but it still reflects correctly that we don't emit frame pointers on x64. if (current_cpu == "x64") { enable_frame_pointers = false } else { @@ -158,10 +208,12 @@ if (is_chromeos_ash || is_chromeos_lacros) { # For caller-callee instrumentation version which needs frame pointers to # get the caller address. use_call_graph +} else if (is_fuchsia) { + # Fuchsia on arm64 could use shadow call stack for unwinding. + enable_frame_pointers = current_cpu != "arm64" } else { - # Explicitly ask for frame pointers, otherwise: - # * Stacks may be missing for sanitizer and profiling builds. - # * Debug tcmalloc can crash (crbug.com/636489). + # Explicitly ask for frame pointers, otherwise stacks may be missing for + # sanitizer and profiling builds. enable_frame_pointers = using_sanitizer || enable_profiling || is_debug } @@ -192,17 +244,9 @@ can_unwind_with_cfi_table = is_android && !is_component_build && # sampling profiler is enabled on android. enable_arm_cfi_table = is_android && !is_component_build && current_cpu == "arm" -declare_args() { - # Set to true to use lld, the LLVM linker. - # Not supported for macOS (see docs/mac_lld.md), and not functional at all for - # iOS. But used for mac cross-compile on linux (may not work properly). - # The default linker everywhere else. - use_lld = is_clang && (!is_apple || host_os == "linux") -} - declare_args() { # Whether to use the gold linker from binutils instead of lld or bfd. - use_gold = !use_lld && !(is_chromecast && is_linux && + use_gold = !use_lld && !(is_castos && (current_cpu == "arm" || current_cpu == "mipsel")) && (((is_linux || is_chromeos_lacros) && (current_cpu == "x64" || current_cpu == "x86" || @@ -222,11 +266,8 @@ declare_args() { # deterministic builds to reduce compile times, so this is less relevant for # official builders. strip_absolute_paths_from_debug_symbols_default = - # TODO(crbug.com/1010267): remove '!use_clang_coverage', coverage build has - # dependency to absolute path of source files. 
- !use_clang_coverage && - (is_android || is_fuchsia || is_nacl || (is_win && use_lld) || is_linux || - is_chromeos || (is_apple && !enable_dsyms)) + is_android || is_fuchsia || is_nacl || (is_win && use_lld) || is_linux || + is_chromeos || (is_apple && !enable_dsyms) # If the platform uses stripped absolute paths by default, then we don't expose # it as a configuration option. If this is causing problems, please file a bug. @@ -251,8 +292,7 @@ if (use_debug_fission == "default") { assert(symbol_level >= -1 && symbol_level <= 2, "Invalid symbol_level") if (symbol_level == -1) { if (is_android && !is_component_build && !use_debug_fission) { - # Reduce symbol level when it will cause invalid elf files to be created - # (due to file size). https://crbug.com/648948. + # Prefer faster & smaller release builds. symbol_level = 1 } else if (is_chromeos_device) { # Use lower symbol level in Simple Chrome build for faster link time. @@ -273,7 +313,7 @@ if (symbol_level == -1) { symbol_level = 1 } else if ((!is_nacl && !is_linux && !is_chromeos && !is_fuchsia && current_os != "aix") || is_debug || is_official_build || - is_chromecast) { + is_castos || is_cast_android) { # Linux builds slower by having symbols as part of the target binary, # whereas Mac and Windows have them separate, so in Release Linux, default # them off, but keep them on for Official builds and Chromecast builds. @@ -293,18 +333,15 @@ use_debug_fission = use_debug_fission && symbol_level == 2 # the build (like nacl) and we don't want to assert on those. # iOS does not support component builds so add an exception for this platform. if (forbid_non_component_debug_builds) { - assert(symbol_level != 2 || current_toolchain != default_toolchain || - is_component_build || !is_debug || is_ios, - "Can't do non-component debug builds at symbol_level=2") + assert( + symbol_level != 2 || current_toolchain != default_toolchain || + is_component_build || !is_debug || is_ios || use_debug_fission, + "Can't do non-component debug builds at symbol_level=2 without use_debug_fission=true") } -# Assert that the configuration isn't going to hit https://crbug.com/648948. -# An exception is made when target_os == "chromeos" as we only use the Android -# toolchain there to build relatively small binaries. -assert( - ignore_elf32_limitations || !is_android || target_os == "chromeos" || - is_component_build || symbol_level < 2 || use_debug_fission, - "Android 32-bit non-component builds without DWARF Fission cannot " + - "have symbol_level=2 due to 4GiB file size limit, see " + - "https://crbug.com/648948. " + "If you really want to try this out, " + - "set ignore_elf32_limitations=true.") +# TODO(crbug.com/1341436) For Windows, to assemble lzma_sdk's assembly files, +# ml64.exe needs to be utilized as llvm-ml cannot yet assemble it. Once llvm-ml +# is able to assemble lzma_sdk assembly files, remove this. +# LzmaDecOpt.asm only works on x64 and not x86. +# https://sourceforge.net/p/sevenzip/discussion/45797/thread/768932e9dd/?limit=25#0d6c +disable_llvm_ml = host_os == "win" && target_cpu == "x64" && !is_msan diff --git a/build/config/compiler/pgo/BUILD.gn b/build/config/compiler/pgo/BUILD.gn index 3e8502ed775b..86e76a41b6b7 100644 --- a/build/config/compiler/pgo/BUILD.gn +++ b/build/config/compiler/pgo/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
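As a side note to the symbol settings above, a minimal args.gn sketch (values assumed for illustration, not taken from this patch) of a 32-bit Android release build that keeps full symbols while staying under the 4GiB ELF limit by splitting the DWARF data out of the binary:

  # Sketch only: full symbols on 32-bit Android via debug fission.
  target_os = "android"
  target_cpu = "arm"  # A 32-bit target, assumed for this example.
  is_debug = false
  is_component_build = false
  symbol_level = 2  # Full debug information.
  use_debug_fission = true  # Emit split-DWARF .dwo files so the ELF stays small.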
@@ -42,33 +42,68 @@ config("pgo_optimization_flags") { if (is_win) { if (target_cpu == "x64") { _pgo_target = "win64" - inputs = [ "//chrome/build/win64.pgo.txt" ] } else { _pgo_target = "win32" - inputs = [ "//chrome/build/win32.pgo.txt" ] } } else if (is_mac) { - _pgo_target = "mac" - inputs = [ "//chrome/build/mac.pgo.txt" ] - } else if (is_linux || is_chromeos_lacros) { + if (target_cpu == "arm64") { + _pgo_target = "mac-arm" + } else { + _pgo_target = "mac" + } + } else if (is_linux) { _pgo_target = "linux" + } else if (is_chromeos_lacros) { + if (target_cpu == "arm") { + _pgo_target = "lacros-arm" + } else if (target_cpu == "arm64") { + _pgo_target = "lacros-arm64" + } else { + _pgo_target = "lacros64" + } + } else if (is_android) { + # Temporarily use mac-arm profile until Android native PGO support works. + # TODO(crbug.com/1308749): fix this. + _pgo_target = "mac-arm" + } else if (is_fuchsia) { + if (target_cpu == "arm64") { + _pgo_target = "mac-arm" + } else { + _pgo_target = "mac" + } + } + + if (_pgo_target == "win64") { + inputs = [ "//chrome/build/win64.pgo.txt" ] + } else if (_pgo_target == "win32") { + inputs = [ "//chrome/build/win32.pgo.txt" ] + } else if (_pgo_target == "mac-arm") { + inputs = [ "//chrome/build/mac-arm.pgo.txt" ] + } else if (_pgo_target == "mac") { + inputs = [ "//chrome/build/mac.pgo.txt" ] + } else if (_pgo_target == "linux") { inputs = [ "//chrome/build/linux.pgo.txt" ] + } else if (_pgo_target == "lacros64") { + inputs = [ "//chrome/build/lacros64.pgo.txt" ] + } else if (_pgo_target == "lacros-arm") { + inputs = [ "//chrome/build/lacros-arm.pgo.txt" ] + } else if (_pgo_target == "lacros-arm64") { + inputs = [ "//chrome/build/lacros-arm64.pgo.txt" ] } - if (pgo_data_path == "" && _pgo_target != "") { - pgo_data_path = rebase_path(exec_script("//tools/update_pgo_profiles.py", - [ - "--target", - _pgo_target, - "get_profile_path", - ], - "value"), - root_build_dir) + if (_pgo_target != "" && pgo_data_path == "") { + pgo_data_path = exec_script("//tools/update_pgo_profiles.py", + [ + "--target", + _pgo_target, + "get_profile_path", + ], + "value") } assert(pgo_data_path != "", "Please set pgo_data_path to point at the profile data") cflags = [ - "-fprofile-instr-use=$pgo_data_path", + "-fprofile-use=" + rebase_path(pgo_data_path, root_build_dir), # It's possible to have some profile data legitimately missing, # and at least some profile data always ends up being considered diff --git a/build/config/compiler/pgo/pgo.gni b/build/config/compiler/pgo/pgo.gni index c053eb530bfe..9e9a0c524992 100644 --- a/build/config/compiler/pgo/pgo.gni +++ b/build/config/compiler/pgo/pgo.gni @@ -1,9 +1,11 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +import("//build/config/chrome_build.gni") import("//build/config/chromecast_build.gni") import("//build/config/chromeos/ui_mode.gni") +import("//build/config/dcheck_always_on.gni") declare_args() { # Specify the current PGO phase. @@ -11,12 +13,19 @@ declare_args() { # 0 : Means that PGO is turned off. # 1 : Used during the PGI (instrumentation) phase. # 2 : Used during the PGO (optimization) phase. + # PGO profiles are generated from `dcheck_always_on = false` builds. Mixing + # those profiles with `dcheck_always_on = true` builds can cause the compiler + # to think some code is hotter than it actually is, potentially causing very + # bad compile times. 
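+  #
+  # As an illustrative sketch only (not part of this change), an
+  # optimization-phase official build would typically set, in args.gn:
+  #   is_official_build = true
+  #   dcheck_always_on = false
+  #   chrome_pgo_phase = 2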
chrome_pgo_phase = 0 - if (is_official_build && + if (!dcheck_always_on && is_official_build && # TODO(crbug.com/1052397): Remove chromeos_is_browser_only once # target_os switch for lacros-chrome is completed. - (is_win || is_mac || - (is_linux && !chromeos_is_browser_only && !is_chromecast))) { + # TODO(crbug.com/1336055): Update this now-outdated condition with regard + # to chromecast and determine whether chromeos_is_browser_only is + # obsolete. + (is_high_end_android || is_win || is_mac || is_fuchsia || + (is_linux && !is_castos && !chromeos_is_browser_only))) { chrome_pgo_phase = 2 } diff --git a/build/config/compute_inputs_for_analyze.gni b/build/config/compute_inputs_for_analyze.gni index 050ab70a3fbe..1e322949fd13 100644 --- a/build/config/compute_inputs_for_analyze.gni +++ b/build/config/compute_inputs_for_analyze.gni @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/coverage/BUILD.gn b/build/config/coverage/BUILD.gn index fa0833e2a82d..59941c3cd15e 100644 --- a/build/config/coverage/BUILD.gn +++ b/build/config/coverage/BUILD.gn @@ -1,8 +1,9 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/coverage/coverage.gni") +import("//build/config/rust.gni") config("default_coverage") { if (use_clang_coverage) { @@ -26,17 +27,17 @@ config("default_coverage") { "-limited-coverage-experimental=true", ] + # Rust coverage is gated on using the Chromium-built Rust toolchain as it + # needs to have a compatible LLVM version with the C++ compiler and the LLVM + # tools that will be used to process the coverage output. This is because + # the coverage file format is not stable. + if (use_chromium_rust_toolchain) { + rustflags = [ "-Cinstrument-coverage" ] + } + if (is_linux || is_chromeos) { # TODO(crbug.com/1194301): Remove this flag. cflags += [ "-fno-use-cxa-atexit" ] } - - if (using_old_compiler) { - # These compiler flags aren't supported by the older clang compiler. - cflags -= [ - "-limited-coverage-experimental=true", - "-fno-use-cxa-atexit", - ] - } } } diff --git a/build/config/coverage/OWNERS b/build/config/coverage/OWNERS index 0fc481f4771c..7b0fe275df1e 100644 --- a/build/config/coverage/OWNERS +++ b/build/config/coverage/OWNERS @@ -1,3 +1 @@ -inferno@chromium.org -liaoyuke@chromium.org -ochang@chromium.org +pasthana@google.com diff --git a/build/config/coverage/coverage.gni b/build/config/coverage/coverage.gni index 9586d8d980b3..2e5b7ab741ac 100644 --- a/build/config/coverage/coverage.gni +++ b/build/config/coverage/coverage.gni @@ -1,8 +1,11 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/toolchain/toolchain.gni") +if (is_fuchsia) { + import("//third_party/fuchsia-sdk/sdk/build/component.gni") +} # There are two ways to enable code coverage instrumentation: # 1. When |use_clang_coverage| or |use_jacoco_coverage| is true and @@ -14,7 +17,11 @@ import("//build/toolchain/toolchain.gni") # input file or Java class files related to source files are instrumented. declare_args() { # Enable Clang's Source-based Code Coverage. 
- use_clang_coverage = false + if (is_fuchsia) { + use_clang_coverage = fuchsia_code_coverage + } else { + use_clang_coverage = false + } # Enables JaCoCo Java code coverage. use_jacoco_coverage = false diff --git a/build/config/cronet/OWNERS b/build/config/cronet/OWNERS new file mode 100644 index 000000000000..78c2d8081e45 --- /dev/null +++ b/build/config/cronet/OWNERS @@ -0,0 +1 @@ +file://components/cronet/OWNERS diff --git a/build/config/cronet/config.gni b/build/config/cronet/config.gni new file mode 100644 index 000000000000..1468ec17a05e --- /dev/null +++ b/build/config/cronet/config.gni @@ -0,0 +1,10 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # Control whether cronet is built (this is usually set by the script + # components/cronet/tools/cr_cronet.py as cronet requires specific + # gn args to build correctly). + is_cronet_build = false +} diff --git a/build/config/crypto.gni b/build/config/crypto.gni deleted file mode 100644 index dc33c5e0ea6e..000000000000 --- a/build/config/crypto.gni +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This file declares build flags for the SSL library configuration. -# -# TODO(brettw) this should probably be moved to src/crypto or somewhere, and -# the global build dependency on it should be removed. -# -# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't -# required. See the declare_args block of BUILDCONFIG.gn for advice on how -# to set up feature flags. - -# True if NSS is used for certificate handling. -use_nss_certs = (is_linux || is_chromeos) && !is_starboard diff --git a/build/config/dcheck_always_on.gni b/build/config/dcheck_always_on.gni index e7d6a79bbed8..ba5eb3a78605 100644 --- a/build/config/dcheck_always_on.gni +++ b/build/config/dcheck_always_on.gni @@ -1,20 +1,39 @@ -# Copyright (c) 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +# TODO(crbug.com/1233050): Until the bug is resolved we need to include +# gclient_args for the definition of build_with_chromium and build_overrides +# for client overrides of that flag. The latter should go away. +# import("//build/config/gclient_args.gni") +import("//build_overrides/build.gni") declare_args() { # Enables DCHECKs to be built-in, but to default to being non-fatal/log-only. - # DCHECKS can then be set as fatal/non-fatal via the DCheckIsFatal feature. + # DCHECKS can then be set as fatal/non-fatal via the "DcheckIsFatal" feature. # See https://bit.ly/dcheck-albatross for details on how this is used. dcheck_is_configurable = false } declare_args() { - # Set to true to enable dcheck in Release builds. - dcheck_always_on = dcheck_is_configurable + # Set to false to disable DCHECK in Release builds. This is enabled by default + # for non-official builds on the below platforms. + # This default only affects Chromium as indicated by build_with_chromium. + # Other clients typically set this to false. If another client wants to use + # the same default value as Chromium, we'd need to add a separate gclient + # variable to replace build_with_chromium here. 
+ dcheck_always_on = + (build_with_chromium && !is_official_build) || dcheck_is_configurable } declare_args() { - # Set to false to disable EXPENSIVE_DCHECK()s. - enable_expensive_dchecks = is_debug || dcheck_always_on + # Set to false to disable EXPENSIVE_DCHECK()s or to true to enable them in + # official builds. These are generally used for really useful DCHECKs that are + # too expensive to be enabled in user-facing official+DCHECK builds. + enable_expensive_dchecks = + is_debug || (dcheck_always_on && !is_official_build) } + +assert(!dcheck_is_configurable || (dcheck_always_on || is_debug), + "dcheck_is_configurable only makes sense with DCHECKs enabled") +assert(!enable_expensive_dchecks || (dcheck_always_on || is_debug), + "enable_expensive_dchecks only makes sense with DCHECKs enabled") diff --git a/build/config/devtools.gni b/build/config/devtools.gni new file mode 100644 index 000000000000..4338e25550f3 --- /dev/null +++ b/build/config/devtools.gni @@ -0,0 +1,37 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chrome_build.gni") +import("//build_overrides/build.gni") + +declare_args() { + if (build_with_chromium) { + # devtools_location is used in DevTools to resolve to the correct location + # for any script/file referenced in the DevTools build scripts. Since + # DevTools supports both a standalone build and build integration with + # Chromium, we need to differentiate between the two versions. + # devtools_location points to the Chromium version in both Chrome-branded + # and not Chrome-branded builds. devtools_root_location points to the root + # of the Chrome-branded version when is_chrome_branded is true and to the root + # of the Chromium version when is_chrome_branded is false. + # devtools_grd_location is the location of the GRD file listing all DevTools + # resources. + if (is_chrome_branded) { + devtools_root_location = "third_party/devtools-frontend-internal" + devtools_location = "$devtools_root_location/devtools-frontend/" + devtools_grd_location = + "$devtools_root_location/chrome_devtools_resources.grd" + } else { + devtools_root_location = "third_party/devtools-frontend/src" + devtools_location = "third_party/devtools-frontend/src/" + devtools_grd_location = + "$devtools_root_location/front_end/devtools_resources.grd" + } + } else { + # DevTools is building a standalone version + devtools_location = "" + devtools_root_location = "" + devtools_grd_location = "" + } +} diff --git a/build/config/features.gni b/build/config/features.gni index 62bf4bcb350f..852ac56a850e 100644 --- a/build/config/features.gni +++ b/build/config/features.gni @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -23,14 +23,21 @@ declare_args() { # # Note: this flag is used by WebRTC which is DEPSed into Chrome. Moving it # out of //build will require using the build_overrides directory. - proprietary_codecs = is_chrome_branded || is_chromecast + # + # Do not add any other conditions to the following line. + # + # TODO(crbug.com/1314528): Remove chromecast-related conditions and force + # builds to explicitly specify this. + proprietary_codecs = is_chrome_branded || is_castos || is_cast_android # libudev usage. This currently only affects the content layer. 
- use_udev = (is_linux || is_chromeos) && !is_chromecast + use_udev = (is_linux && !is_castos) || is_chromeos + + use_dbus = is_linux || is_chromeos - use_dbus = (is_linux || is_chromeos) && !is_chromecast + use_gio = is_linux && !is_castos - use_gio = is_linux && !is_chromecast + use_blink = !is_ios } # # ============================================= diff --git a/build/config/freetype/BUILD.gn b/build/config/freetype/BUILD.gn index 76cb025fbb4d..88a9c59f0a31 100644 --- a/build/config/freetype/BUILD.gn +++ b/build/config/freetype/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/freetype/freetype.gni b/build/config/freetype/freetype.gni index b4eced2d6508..60aeb0452d70 100644 --- a/build/config/freetype/freetype.gni +++ b/build/config/freetype/freetype.gni @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/fuchsia/BUILD.gn b/build/config/fuchsia/BUILD.gn index 88922a11e879..bbcd70886fe9 100644 --- a/build/config/fuchsia/BUILD.gn +++ b/build/config/fuchsia/BUILD.gn @@ -1,11 +1,14 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/chromecast_build.gni") +import("//build/config/clang/clang.gni") +import("//build/config/fuchsia/generate_runner_scripts.gni") +import("//third_party/fuchsia-sdk/sdk/build/config/config.gni") assert(is_fuchsia) -assert(!is_posix) +assert(!is_posix, "Fuchsia is not POSIX.") config("compiler") { configs = [ "//third_party/fuchsia-sdk/sdk/build/config:compiler" ] @@ -14,17 +17,84 @@ config("compiler") { # https://fuchsia.googlesource.com/zircon/+/master/system/private/zircon/stack.h#9), # but on other platforms it's much higher, so a variety of code assumes more # will be available. Raise to 8M which matches e.g. macOS. - ldflags = [ "-Wl,-z,stack-size=0x800000" ] + ldflags = [ + "-Wl,-z,stack-size=0x800000", + "-fexperimental-relative-c++-abi-vtables", + ] + cflags_cc = [ "-fexperimental-relative-c++-abi-vtables" ] +} + +# Files required to run on Fuchsia on isolated swarming clients. 
+group("deployment_resources") { + data = [ + "//build/fuchsia/", + "//build/util/lib/", + "//third_party/fuchsia-sdk/sdk/.build-id/", + "//third_party/fuchsia-sdk/sdk/bin/fuchsia-common.sh", + "//third_party/fuchsia-sdk/sdk/meta/manifest.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/ffx", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/ffx-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/fvm", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/fvm-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/merkleroot", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/merkleroot-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/pm", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/pm-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/symbolizer", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/symbolizer-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/zbi", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/zbi-meta.json", + ] + + if (fuchsia_additional_boot_images == []) { + data += [ "${boot_image_root}" ] + } - # Allow this in chromium-only builds, but do not allow this in Chromecast - # builds. - if (!is_chromecast) { - cflags_cc = [ "-fexperimental-relative-c++-abi-vtables" ] - ldflags += [ "-fexperimental-relative-c++-abi-vtables" ] + foreach(fuchsia_additional_boot_image, fuchsia_additional_boot_images) { + data += [ "${fuchsia_additional_boot_image}/" ] } + + if (test_isolate_uses_emulator) { + data += [ + "//third_party/fuchsia-sdk/sdk/bin/device_launcher.version", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/fvdl", + ] + if (test_host_cpu == "x64") { + data += [ + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/aemu_internal", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/aemu_internal-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/qemu_internal", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/qemu_internal-meta.json", + ] + } else if (test_host_cpu == "arm64") { + data += [ + "//third_party/qemu-${host_os}-${test_host_cpu}/", + + # TODO(https://crbug.com/1336776): remove when ffx has native support + # for starting emulator on arm64 host. + "//third_party/fuchsia-sdk/sdk/tools/x64/qemu_internal-meta.json", + ] + } + } +} + +# Copy the loader to place it at the expected path in the final package. +copy("sysroot_asan_libs") { + sources = + [ "${fuchsia_sdk}/arch/${target_cpu}/sysroot/dist/lib/asan/ld.so.1" ] + outputs = [ "${root_out_dir}/lib/asan/{{source_file_part}}" ] +} + +# Copy the loader to place it at the expected path in the final package. +copy("sysroot_asan_runtime_libs") { + sources = [ "$clang_base_path/lib/clang/$clang_version/lib/x86_64-unknown-fuchsia/libclang_rt.asan.so" ] + outputs = [ "${root_out_dir}/lib/{{source_file_part}}" ] } -# Settings for executables. -config("executable_config") { - ldflags = [ "-pie" ] +# This adds the runtime deps for Fuchsia ASAN builds. 
+group("asan_runtime_library") { + data_deps = [ + ":sysroot_asan_libs", + ":sysroot_asan_runtime_libs", + ] } diff --git a/build/config/fuchsia/DIR_METADATA b/build/config/fuchsia/DIR_METADATA index 6d8f079aa581..210aa6a954b8 100644 --- a/build/config/fuchsia/DIR_METADATA +++ b/build/config/fuchsia/DIR_METADATA @@ -1,7 +1 @@ -monorail { - component: "Fuchsia" -} - -team_email: "cr-fuchsia@chromium.org" - -os: FUCHSIA +mixins: "//build/fuchsia/COMMON_METADATA" diff --git a/build/config/fuchsia/OWNERS b/build/config/fuchsia/OWNERS index 3a1056b296fd..565fda1e097d 100644 --- a/build/config/fuchsia/OWNERS +++ b/build/config/fuchsia/OWNERS @@ -1,4 +1,5 @@ file://build/fuchsia/OWNERS -per-file *.cmx=set noparent -per-file *.cmx=file://fuchsia/SECURITY_OWNERS +chonggu@google.com +rohpavone@chromium.org +zijiehe@google.com diff --git a/build/config/fuchsia/add_DebugData_service.test-cmx b/build/config/fuchsia/add_DebugData_service.test-cmx deleted file mode 100644 index 33fb6b07c103..000000000000 --- a/build/config/fuchsia/add_DebugData_service.test-cmx +++ /dev/null @@ -1,7 +0,0 @@ -{ - "sandbox": { - "services": [ - "fuchsia.debugdata.DebugData" - ] - } -} \ No newline at end of file diff --git a/build/config/fuchsia/build_cmx_from_fragment.py b/build/config/fuchsia/build_cmx_from_fragment.py deleted file mode 100644 index ac7e34988d02..000000000000 --- a/build/config/fuchsia/build_cmx_from_fragment.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. -"""Creates a complete CMX (v1) component manifest, from a program name and - manifest fragment file.""" - -import argparse -import json -import sys - - -def BuildCmxFromFragment(output_file, fragment_file, program_binary): - """Reads a CMX fragment specifying e.g. features & sandbox, and a program - binary's filename, and writes out the full CMX. - - output_file: Build-relative filename at which to write the full CMX. - fragment_file: Build-relative filename of the CMX fragment to read from. - program_binary: Package-relative filename of the program binary. - """ - - with open(output_file, 'w') as component_manifest_file: - component_manifest = json.load(open(fragment_file, 'r')) - component_manifest.update({ - 'program': { - 'binary': program_binary - }, - }) - json.dump(component_manifest, component_manifest_file) - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument( - '--cmx-fragment', - required=True, - help='Path to the CMX fragment to read from') - parser.add_argument( - '--cmx', required=True, help='Path to write the complete CMX file to') - parser.add_argument( - '--program', - required=True, - help='Package-relative path to the program binary') - args = parser.parse_args() - - return BuildCmxFromFragment(args.cmx, args.cmx_fragment, args.program) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/config/fuchsia/build_symbol_archive.py b/build/config/fuchsia/build_symbol_archive.py index c763627bf915..a595ed8a7a4f 100755 --- a/build/config/fuchsia/build_symbol_archive.py +++ b/build/config/fuchsia/build_symbol_archive.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
diff --git a/build/config/fuchsia/config.gni b/build/config/fuchsia/config.gni
index 5a00cc5a85d3..1efe24cc70f7 100644
--- a/build/config/fuchsia/config.gni
+++ b/build/config/fuchsia/config.gni
@@ -1,11 +1,8 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
+# Copyright 2017 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 assert(is_fuchsia)
 
-# Compute the AEMU path.
-aemu_root = "//third_party/aemu-${host_os}-${target_cpu}"
-
 # Compute the path to the arch-specific boot image directory.
-boot_image_root = "//third_party/fuchsia-sdk/images/${target_cpu}"
+boot_image_root = "//third_party/fuchsia-sdk/images/"
diff --git a/build/config/fuchsia/extend_fvm.py b/build/config/fuchsia/extend_fvm.py
index 44e5ee30e123..ae95f6736c0a 100644
--- a/build/config/fuchsia/extend_fvm.py
+++ b/build/config/fuchsia/extend_fvm.py
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/build/config/fuchsia/fuchsia_package_metadata.gni b/build/config/fuchsia/fuchsia_package_metadata.gni
new file mode 100644
index 000000000000..fb33bb2bd40e
--- /dev/null
+++ b/build/config/fuchsia/fuchsia_package_metadata.gni
@@ -0,0 +1,38 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_fuchsia)
+
+# Generates a metadata file under root_gen_dir which provides information about
+# a Fuchsia package.
+# Parameters:
+#   package_deps: An array of [package_target, package_name] pairs which
+#       identify the .far files that the package depends on.
+template("fuchsia_package_metadata") {
+  _pkg_dir = "$root_out_dir/gen/" + get_label_info(invoker.package, "dir") +
+             "/" + target_name
+  _pkg_path = "$_pkg_dir/${target_name}.far"
+  pkg_dep_paths = [ rebase_path(_pkg_path, root_build_dir) ]
+  if (defined(invoker.package_deps)) {
+    foreach(package_dep, invoker.package_deps) {
+      _pkg_dep_target = package_dep[0]
+      _pkg_dep_name = package_dep[1]
+      pkg_dep_path =
+          rebase_path(get_label_info(_pkg_dep_target, "target_gen_dir") + "/" +
+                          _pkg_dep_name + "/" + _pkg_dep_name + ".far",
+                      root_build_dir)
+      pkg_dep_paths += [ pkg_dep_path ]
+    }
+  }
+
+  pkg_metadata = "${target_name}_script_meta"
+  generated_file(pkg_metadata) {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    contents = {
+      packages = pkg_dep_paths
+    }
+    output_conversion = "json"
+    outputs = [ "$root_gen_dir/package_metadata/${invoker.target_name}.meta" ]
+  }
+}
diff --git a/build/config/fuchsia/generate_runner_scripts.gni b/build/config/fuchsia/generate_runner_scripts.gni
index d9a67521ee69..cf01659fdd5d 100644
--- a/build/config/fuchsia/generate_runner_scripts.gni
+++ b/build/config/fuchsia/generate_runner_scripts.gni
@@ -1,21 +1,26 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-assert(is_fuchsia)
-
 import("//build/config/chromecast_build.gni")
 import("//build/config/fuchsia/config.gni")
-import("//build/config/fuchsia/package.gni")
+import("//build/config/fuchsia/fuchsia_package_metadata.gni")
 import("//build/config/gclient_args.gni")
 import("//build/config/sysroot.gni")
 import("//build/util/generate_wrapper.gni")
 
+assert(is_fuchsia)
+
 declare_args() {
   # Sets the Fuchsia Amber repository which will be used by default by the
   # generated installation scripts. If not specified, then no default directory
   # will be used.
-  default_fuchsia_build_dir_for_installation = ""
+  default_fuchsia_out_dir = ""
+
+  # Sets the Fuchsia device node name which will be used by default by the
+  # generated runner scripts. If not specified, then no default node name will
+  # be used.
+  default_fuchsia_device_node_name = ""
 
   # CPU architecture of the host used to run the tests.
   test_host_cpu = host_cpu
@@ -25,245 +30,221 @@ declare_args() {
 
   # A list of additional Fuchsia boot images to include in the test isolates.
   fuchsia_additional_boot_images = []
+
+  # This variable controls the browser included in the Telemetry-based test
+  # targets.
+  fuchsia_browser_type = "web_engine_shell"
 }
 
-# Generates a script which deploys and optionally executes a package on a
-# device.
+# Generates a wrapper script under root_build_dir/bin that performs an
+# operation, such as deployment or execution, using a package and its
+# dependencies.
 #
 # Parameters:
-#   package: The package() target which will be run.
-#   package_name_override: Specifies the name of the generated package, if its
-#       name is different than the |package| target name. This value must match
-#       package_name_override in the |package| target.
-#   package_deps: An array of [package, package_name_override] array pairs
+#   output_name_format: The format string for the generated script's filename.
+#                       The placeholder string %package% will be substituted
+#                       with |package| (or |package_name|, if set).
+#                       Examples: "run_%package%", "install_%package%"
+#   package: The package() target to run.
+#   package_name: Specifies the name of the generated package, if its
+#       filename is different than the |package| target name. This value must
+#       match package_name in the |package| target.
+#   package_deps: An array of [package, package_name] array pairs
 #       which specify additional dependency packages to be installed
 #       prior to execution.
-#   runner_script: The runner script implementation to use, relative to
-#       "build/fuchsia". Defaults to "test_runner.py".
-#   install_only: If true, executing the script will only install the package
-#       on the device, but not run it.
-#   is_test_exe: If true, the generated script will run the command under
-#       test_env.py and add arguments expected to be passed to test exes.
-template("fuchsia_package_runner") {
-  forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "runner_script" ])
-
-  if (defined(invoker.package_name_override)) {
-    _pkg_shortname = invoker.package_name_override
+#   executable: The underlying script to be called by the wrapper script.
+#   executable_args: The list of arguments to pass to |executable|.
+#                    Runtime command-line arguments can be passed to
+#                    |executable| using the placeholder %args%.
+#
+#                    In addition, the script is passed the following
+#                    executable_args:
+#                      --package - the path to a .FAR package to install.
+#                      --package_name - the name of the package to use as an
+#                                       entry point.
+#   include_fuchsia_out_dir: If true, adds |default_fuchsia_out_dir|
+#                            to executable_args (when set in GN args).
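+#
+# A hypothetical usage sketch follows (the target and package names are
+# assumed for illustration and do not come from this change); it mirrors how
+# fuchsia_package_installer() below invokes this template:
+#   fuchsia_run_script_with_packages("deploy_example") {
+#     package = ":example_package"
+#     package_name = "example"
+#     output_name_format = "deploy_%package%"
+#     executable = rebase_path("//build/fuchsia/test/deploy_to_fuchsia.py")
+#     executable_args = [ "example" ]
+#   }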
+template("fuchsia_run_script_with_packages") { + if (defined(invoker.package_name)) { + _pkg_shortname = invoker.package_name } else { _pkg_shortname = get_label_info(invoker.package, "name") } - _pkg_dir = "$root_out_dir/gen/" + get_label_info(invoker.package, "dir") + - "/" + _pkg_shortname - _package_path = "$_pkg_dir/${_pkg_shortname}.far" + _generated_script_path = + "$root_build_dir/bin/" + + string_replace(invoker.output_name_format, "%package%", _pkg_shortname) - generated_run_pkg_script_path = "$root_build_dir/bin/run_${_pkg_shortname}" - generated_install_pkg_script_path = - "$root_build_dir/bin/install_$_pkg_shortname" + generate_wrapper(target_name) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "executable", + "executable_args", + "data", + "include_fuchsia_out_dir", + "target", + ]) - _generate_runner_target = "${target_name}__generate_runner" - _generate_installer_target = "${target_name}__generate_installer" + wrapper_script = _generated_script_path + deps = [ invoker.package ] - # Generates a script which installs and runs a test. - generate_wrapper(_generate_runner_target) { - forward_variables_from(invoker, [ "target" ]) + if (!defined(data_deps)) { + data_deps = [] + } + data_deps += [ "//build/config/fuchsia:deployment_resources" ] - _is_test_exe = defined(invoker.is_test_exe) && invoker.is_test_exe + _combined_package_list = [ invoker.package ] - if (defined(runner_script)) { - _runner_script = runner_script - } else { - _runner_script = "//build/fuchsia/test_runner.py" + if (defined(invoker.package_deps)) { + foreach(package_dep, invoker.package_deps) { + _combined_package_list += [ package_dep[0] ] + } } - - if (_is_test_exe) { - executable = "//testing/test_env.py" - executable_args = - [ "@WrappedPath(" + rebase_path(_runner_script, root_out_dir) + ")" ] - data = [ - _runner_script, - "//.vpython", + foreach(package_dep, _combined_package_list) { + data_deps += [ + package_dep, + package_dep + "__archive-manifest", + package_dep + "__archive-metadata", ] - data_deps = [ "//testing:test_scripts_shared" ] - } else { - executable = rebase_path(_runner_script) - executable_args = [] - data = [] - data_deps = [] } - if (defined(invoker.data)) { - data += invoker.data - } - - wrapper_script = generated_run_pkg_script_path - - data_deps += [ - invoker.package, - - # Runner scripts require access to "ids.txt" for symbolization, and to - # the "package" from which to get the name & version to deploy, which - # are outputs of the archive manifest generation action. - "${invoker.package}__archive-manifest", - - # Runner scripts require access to "meta.far" from which to calculate the - # expected Merkle root for the package, to verify it has been cached. - "${invoker.package}__archive-metadata", - ] if (defined(invoker.data_deps)) { data_deps += invoker.data_deps } - # Declares the files that are needed for test execution on the - # swarming test client. - data += [ - "//build/fuchsia/", - "//build/util/lib/", - "//third_party/fuchsia-sdk/sdk/.build-id/", - "//third_party/fuchsia-sdk/sdk/bin/fpave.sh", - "//third_party/fuchsia-sdk/sdk/bin/fuchsia-common.sh", - "//third_party/fuchsia-sdk/sdk/meta/manifest.json", - ] - - # TODO(crbug.com/1137662): Remove checkout_fuchsia_for_arm64_host from - # gclient_gn_args in //DEPS as well as this condition when builders have - # test_host_cpu set correctly. - if (checkout_fuchsia_for_arm64_host) { - test_host_cpu = "arm64" + # Compute the list of full paths to package files, including dependencies. 
+ if (defined(invoker.package_deps)) { + foreach(package_dep, invoker.package_deps) { + package_dep_target = package_dep[0] + deps += [ package_dep_target ] + data_deps += [ package_dep_target ] + } } - if (test_host_cpu == "x64") { - data_deps += - [ "//build/config/clang:llvm-symbolizer_data($host_toolchain)" ] + # Include package information inside the wrapper script. + if (!defined(executable_args)) { + executable_args = [] } - data += [ - "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/device-finder", - "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/fvm", - "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/merkleroot", - "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/pm", + if (defined(include_fuchsia_out_dir) && include_fuchsia_out_dir && + default_fuchsia_out_dir != "") { + executable_args += [ + "--fuchsia-out-dir", + default_fuchsia_out_dir, + ] + } + } - # TODO(crbug.com/1162314) Remove "symbolize" when transition to - # "symbolizer" is complete. - "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/symbolize", + # Create a wrapper script rather than using a group() in order to ensure + # "ninja $target_name" always works. + if (defined(invoker.executable_wrapper)) { + generate_wrapper(invoker.executable_wrapper) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + executable = _generated_script_path + wrapper_script = "$root_build_dir/${invoker.executable_wrapper}" + deps = [ ":${invoker._run_target}" ] + } + } +} - "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/symbolizer", - "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/zbi", +# Generates a script which deploys a package to the TUF repo of a Fuchsia +# build output directory. +template("fuchsia_package_installer") { + if (defined(invoker.package_name)) { + pkg_shortname = invoker.package_name + } else { + pkg_shortname = get_label_info(invoker.package, "name") + } + fuchsia_package_metadata(pkg_shortname) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "package", + "package_deps", + ]) + } + fuchsia_run_script_with_packages(target_name) { + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ "executable_args" ]) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + executable = rebase_path("//build/fuchsia/test/deploy_to_fuchsia.py") + executable_args = [ + "--out-dir", + "@WrappedPath(.)", + pkg_shortname, ] + output_name_format = "deploy_%package%" + include_fuchsia_out_dir = true + } +} - if (test_isolate_uses_emulator) { - data += [ - "${boot_image_root}/qemu/qemu-kernel.kernel", - "${boot_image_root}/qemu/storage-full.blk", - "${boot_image_root}/qemu/zircon-a.zbi", - "//third_party/qemu-${host_os}-${test_host_cpu}/", - ] +# Generates scripts for installing and running test packages. +# See fuchsia_run_script_with_packages() for the full list of parameters. +template("fuchsia_test_runner") { + _run_target = "${target_name}__runner" + _install_target = "${target_name}__installer" - # Include AEMU for x64 emulator hosts and for arm64 hosts. 
- if (test_host_cpu == "x64" || test_host_cpu == "arm64") { - data += [ "${aemu_root}/" ] - } - } + fuchsia_run_script_with_packages(_run_target) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "data", + "data_deps", + "package", + "package_name", + "package_deps", + ]) - foreach(fuchsia_additional_boot_image, fuchsia_additional_boot_images) { - data += [ "${fuchsia_additional_boot_image}/" ] - } + _test_runner_py = "//build/fuchsia/test/run_test.py" - package_paths = [ rebase_path(_package_path, root_build_dir) ] - if (defined(invoker.package_deps)) { - foreach(package_dep, invoker.package_deps) { - package_dep_target = package_dep[0] - package_dep_name = package_dep[1] - - data_deps += [ - package_dep_target, - package_dep_target + "__archive-manifest", - package_dep_target + "__archive-metadata", - ] - package_dep_path = rebase_path( - get_label_info(package_dep_target, "target_gen_dir") + "/" + - package_dep_name + "/" + package_dep_name + ".far", - root_build_dir) - package_paths += [ package_dep_path ] - } - } + executable = rebase_path(_test_runner_py) - foreach(package_path, package_paths) { - executable_args += [ - "--package", - "@WrappedPath(${package_path})", - ] + if (defined(invoker.is_test_exe) && invoker.is_test_exe) { + data += [ "//.vpython3" ] } + output_name_format = "run_%package%" + executable_wrapper = invoker.target_name - executable_args += [ + # Populate the arguments used by the test runner, defined at build-time. + executable_args = [ "--out-dir", "@WrappedPath(.)", - "--target-cpu", - target_cpu, - "--package-name", - _pkg_shortname, ] + executable_args += [ package_name ] + if (defined(invoker.use_test_server) && invoker.use_test_server) { executable_args += [ "--enable-test-server" ] } - if (default_fuchsia_build_dir_for_installation != "") { + if (default_fuchsia_device_node_name != "") { executable_args += [ - "--fuchsia-out-dir", - default_fuchsia_build_dir_for_installation, + "--target-id", + default_fuchsia_device_node_name, ] } - } - - # Produces a script which installs a package and its dependencies into the - # Amber repository of a pre-existing Fuchsia build directory. - generate_wrapper(_generate_installer_target) { - executable = rebase_path("//build/fuchsia/deploy_to_amber_repo.py") - wrapper_script = generated_install_pkg_script_path - - data_deps = [ invoker.package ] - if (defined(invoker.data_deps)) { - data_deps += invoker.data_deps - } - # Build a list of all packages to install, and pass the list to the runner - # script. - package_paths = [ rebase_path(_package_path, root_build_dir) ] - if (defined(invoker.package_deps)) { - foreach(package_dep, invoker.package_deps) { - package_dep_target = package_dep[0] - package_dep_name = package_dep[1] - - data_deps += [ package_dep_target ] - package_dep_path = rebase_path( - get_label_info(package_dep_target, "target_gen_dir") + "/" + - package_dep_name + "/" + package_dep_name + ".far", - root_build_dir) - package_paths += [ package_dep_path ] - } + # Declare the files that are needed for test execution on LUCI swarming + # test clients, both directly (via data) or indirectly (via data_deps). 
+ if (!defined(data)) { + data = [] } - executable_args = [] - foreach(package_path, package_paths) { - executable_args += [ - "--package", - "@WrappedPath(${package_path})", - ] + data += [ + _test_runner_py, + "$root_gen_dir/package_metadata/${invoker.package_name}.meta", + ] - if (default_fuchsia_build_dir_for_installation != "") { - executable_args += [ - "--fuchsia-out-dir", - default_fuchsia_build_dir_for_installation, - ] - } + # TODO(crbug.com/1256870): Remove this once all out-of-tree references + # to "package_name_override" are migrated to "package_name". + if (defined(invoker.package_name_override)) { + package_name = invoker.package_name_override } } - - group(target_name) { - deps = [ ":${_generate_installer_target}" ] - - if (!defined(invoker.install_only) || invoker.install_only == false) { - deps += [ ":${_generate_runner_target}" ] - } + fuchsia_package_installer(_install_target) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "package", + "package_name", + "package_deps", + ]) } } diff --git a/build/config/fuchsia/gfx_tests.cmx b/build/config/fuchsia/gfx_tests.cmx deleted file mode 100644 index c02975d821f9..000000000000 --- a/build/config/fuchsia/gfx_tests.cmx +++ /dev/null @@ -1,30 +0,0 @@ -{ - "sandbox": { - "features": [ - "deprecated-ambient-replace-as-executable", - "isolated-persistent-storage", - "isolated-temp", - "vulkan" - ], - "dev": [ - "null", - "zero" - ], - "services": [ - "fuchsia.accessibility.semantics.SemanticsManager", - "fuchsia.device.NameProvider", - "fuchsia.fonts.Provider", - "fuchsia.intl.PropertyProvider", - "fuchsia.logger.LogSink", - "fuchsia.memorypressure.Provider", - "fuchsia.process.Launcher", - "fuchsia.sys.Environment", - "fuchsia.sys.Loader", - "fuchsia.sysmem.Allocator", - "fuchsia.tracing.provider.Registry", - "fuchsia.ui.policy.Presenter", - "fuchsia.ui.scenic.Scenic", - "fuchsia.vulkan.loader.Loader" - ] - } -} diff --git a/build/config/fuchsia/package.gni b/build/config/fuchsia/package.gni deleted file mode 100644 index ff6ffd0a3d2b..000000000000 --- a/build/config/fuchsia/package.gni +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import("//third_party/fuchsia-sdk/sdk/build/component.gni") -import("//third_party/fuchsia-sdk/sdk/build/package.gni") - -# DEPRECATED: Use the Fuchsia SDK's fuchsia_component() and fuchsia_package() -# templates directly, in new code. -# -# Creates a Fuchsia .far package file containing a Fuchsia component. -# -# Parameters are: -# package_name_override: Specifies the name of the package to generate, -# if different than |target_name|. -# binary: The executable target which should be launched. -# manifest: A path to the manifest that will be used. -# "testonly" targets default to using -# //build/config/fuchsia/tests-with-exec.cmx. -# Non-test targets must explicitly specify a |manifest|. -# additional_manifests: Manifest files that should be included in the package in -# the /meta directory. This allows to package more than one component per -# manifest. These manifest files must specify program/binary to run, which -# is not required for the main manifest file where this parameter is added -# during build. -# component_name_override: If set, specifies the name of the component. -# By default, the component name is the same as the package name. -# deps: Additional targets to build and include in the package (optional). 
-# -# TODO(https://crbug.com/1050703): Migrate consumers to GN SDK equivalents. -template("cr_fuchsia_package") { - assert(defined(invoker.binary)) - - if (defined(invoker.package_name_override)) { - _package_name = invoker.package_name_override - } else { - _package_name = invoker.target_name - } - - _package_contents = [ invoker.binary ] - if (defined(invoker.deps)) { - _package_contents += invoker.deps - } - - _component_cmx_target = target_name + "__cr-component-cmx" - _component_target = target_name + "__cr-component" - _package_components = [ ":${_component_target}" ] - _component_manifest = "${target_gen_dir}/${target_name}.cmx" - - # Process the CMX fragment in |manifest| to get a full manifest. - action(_component_cmx_target) { - forward_variables_from(invoker, - [ - "deps", - "testonly", - ]) - - script = "//build/config/fuchsia/build_cmx_from_fragment.py" - - inputs = [ invoker.manifest ] - outputs = [ _component_manifest ] - - args = [ - "--cmx-fragment", - rebase_path(invoker.manifest), - "--cmx", - rebase_path(_component_manifest), - "--program", - get_label_info(invoker.binary, "name"), - ] - } - - # Declare the primary component for this package. - fuchsia_component(_component_target) { - forward_variables_from(invoker, [ "testonly" ]) - - deps = [ ":${_component_cmx_target}" ] - data_deps = _package_contents - manifest = _component_manifest - - if (defined(invoker.component_name_override)) { - manifest_output_name = "${invoker.component_name_override}" - } else { - manifest_output_name = "${_package_name}" - } - } - - # Bundle manifests providing additional entrypoints into the package. - if (defined(invoker.additional_manifests)) { - foreach(filename, invoker.additional_manifests) { - _additional_component_target = - target_name + "_" + get_path_info(filename, "name") - _package_components += [ ":${_additional_component_target}" ] - fuchsia_component(_additional_component_target) { - forward_variables_from(invoker, [ "testonly" ]) - data_deps = _package_contents - manifest = filename - - # Depend upon the invoker's |deps|, in case they include a dependency - # responsible for generating this additional component's manifest file. - deps = _package_contents - } - } - } - - fuchsia_package(target_name) { - forward_variables_from(invoker, [ "testonly" ]) - package_name = _package_name - if (defined(invoker.excluded_files)) { - excluded_files = invoker.excluded_files - } - deps = _package_components - } -} diff --git a/build/config/fuchsia/packaged_content_embedder_excluded_dirs.gni b/build/config/fuchsia/packaged_content_embedder_excluded_dirs.gni new file mode 100644 index 000000000000..f179a66d792e --- /dev/null +++ b/build/config/fuchsia/packaged_content_embedder_excluded_dirs.gni @@ -0,0 +1,16 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/devtools.gni") + +assert(is_fuchsia) + +# List of transitively included directories that should be stripped from +# released packages for size reasons. For use with the |excluded_dirs| variable +# of fuchsia_package(). +FUCHSIA_PACKAGED_CONTENT_EMBEDDER_EXCLUDED_DIRS = [ + # These are mistakenly being shipped in both PAK form and runtime data deps. + # TODO(crbug.com/1265660): Remove when DevTools stops leaking its source list. 
+  devtools_root_location,
+]
diff --git a/build/config/fuchsia/rules.gni b/build/config/fuchsia/rules.gni
deleted file mode 100644
index 689e130b651f..000000000000
--- a/build/config/fuchsia/rules.gni
+++ /dev/null
@@ -1,5 +0,0 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/fuchsia/generate_runner_scripts.gni")
diff --git a/build/config/fuchsia/size_optimized_cast_receiver_args.gn b/build/config/fuchsia/size_optimized_cast_receiver_args.gn
new file mode 100644
index 000000000000..9a366c7bfead
--- /dev/null
+++ b/build/config/fuchsia/size_optimized_cast_receiver_args.gn
@@ -0,0 +1,43 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains feature and optimization overrides that are commonly
+# required or useful for Cast Receiver implementations.
+# It prioritizes size and disables unneeded features that may add size.
+#
+# To use it, do one of the following:
+# * Add the following to your `gn args`:
+#   import("//build/config/fuchsia/size_optimized_cast_receiver_args.gn")
+# * Add the following to `gn_args` in a bot recipe:
+#   'args_file': '//build/config/fuchsia/size_optimized_cast_receiver_args.gn'

+# There is no reason these values couldn't be used on other platforms, but this
+# file is in a fuchsia/ directory and some refactoring would probably be
+# appropriate before reusing this file.
+# It is not possible to assert the platform because `target_os` is not defined
+# when this file is imported.
+
+enable_printing = false
+enable_cast_receiver = true
+cast_streaming_enable_remoting = true
+enable_dav1d_decoder = false
+enable_v8_compile_hints = false
+
+# //chrome makes many assumptions that Extensions are enabled.
+# TODO(crbug.com/1363742): Fix these assumptions or avoid building it.
+# enable_extensions = false
+
+enable_hidpi = false
+enable_libaom = false
+enable_library_cdms = false
+enable_logging_override = true
+enable_pdf = false
+enable_plugins = false
+optimize_for_size = true
+optional_trace_events_enabled = false
+
+# Ensure PGO and ThinLTO are disabled as these optimizations increase the binary
+# size (see crbug.com/1322959).
+chrome_pgo_phase = 0
+use_thin_lto = false
diff --git a/build/config/fuchsia/size_optimized_cast_receiver_args_internal.gn b/build/config/fuchsia/size_optimized_cast_receiver_args_internal.gn
new file mode 100644
index 000000000000..b59ce96a6ef0
--- /dev/null
+++ b/build/config/fuchsia/size_optimized_cast_receiver_args_internal.gn
@@ -0,0 +1,18 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a version of size_optimized_cast_receiver_args.gn that is intended for
+# internal builds and requires src-internal.
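+#
+# As a concrete sketch, a developer args.gn might combine the non-internal
+# variant defined above with the usual target settings (values here are
+# illustrative only, not prescribed):
+#
+#   import("//build/config/fuchsia/size_optimized_cast_receiver_args.gn")
+#   target_os = "fuchsia"
+#   target_cpu = "x64"
+#   is_debug = false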
+#
+# To use it, do one of the following:
+# * Add the following to your `gn args`:
+#   import("//build/config/fuchsia/size_optimized_cast_receiver_args_internal.gn")
+# * Add the following to `gn_args` in a bot recipe:
+#   'args_file': '//build/config/fuchsia/size_optimized_cast_receiver_args_internal.gn'
+
+import("//build/config/fuchsia/size_optimized_cast_receiver_args.gn")
+
+enable_widevine = true
+use_internal_isolated_origins = true
+use_official_google_api_keys = false
diff --git a/build/config/fuchsia/sizes.gni b/build/config/fuchsia/sizes.gni
index 20a5bf831e3a..fc9767622355 100644
--- a/build/config/fuchsia/sizes.gni
+++ b/build/config/fuchsia/sizes.gni
@@ -1,7 +1,9 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+assert(is_fuchsia)
+
 import("//build/util/generate_wrapper.gni")
 
 template("compute_fuchsia_package_sizes") {
@@ -28,11 +30,14 @@ template("compute_fuchsia_package_sizes") {
 
     # Declares the files that are needed for test execution on the
    # swarming test client.
+    # TODO(crbug.com/1347172): Remove arm64 once the execution of fuchsia_sizes
+    # has been migrated to x64 machines.
     data += [
       "//build/fuchsia/",
-      "//fuchsia/release/size_tests/",
+      "//tools/fuchsia/size_tests/",
       "//third_party/fuchsia-sdk/sdk/arch/",
-      "//third_party/fuchsia-sdk/sdk/tools/${target_cpu}/",
+      "//third_party/fuchsia-sdk/sdk/tools/arm64/",
+      "//third_party/fuchsia-sdk/sdk/tools/x64/",
     ]
 
     executable_args = [
diff --git a/build/config/fuchsia/symbol_archive.gni b/build/config/fuchsia/symbol_archive.gni
index 9dcb53cae0fd..e05af1155c0b 100644
--- a/build/config/fuchsia/symbol_archive.gni
+++ b/build/config/fuchsia/symbol_archive.gni
@@ -1,4 +1,4 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@ assert(is_fuchsia)
 # ".build_ids" convention used by the symbolizer and GNU GDB.
 #
 # Parameters:
-#   deps: Must all be cr_fuchsia_package() or fuchsia_package() targets.
+#   deps: Must all be fuchsia_package() targets.
 #   ids_txt: The "ids.txt" file which lists the relative paths to unstripped
 #     executables and libraries, along with their build IDs.
 #   archive_name: The path to the compressed tarball that will be generated.
diff --git a/build/config/fuchsia/test/OWNERS b/build/config/fuchsia/test/OWNERS
index 3be17de545eb..ac711c0605b7 100644
--- a/build/config/fuchsia/test/OWNERS
+++ b/build/config/fuchsia/test/OWNERS
@@ -1,7 +1,7 @@
 file://build/fuchsia/OWNERS
 
-per-file *.test-cmx=set noparent
-per-file *.test-cmx=ddorwin@chromium.org
-per-file *.test-cmx=wez@chromium.org
+per-file *.test-cml=set noparent
+per-file *.test-cml=ddorwin@chromium.org
+per-file *.test-cml=wez@chromium.org
 # Please prefer the above when possible.
-per-file *.test-cmx=file://fuchsia/SECURITY_OWNERS
+per-file *.test-cml=file://build/fuchsia/SECURITY_OWNERS
diff --git a/build/config/fuchsia/test/README.md b/build/config/fuchsia/test/README.md
index 8427f4a49fc9..d21cdb79a412 100644
--- a/build/config/fuchsia/test/README.md
+++ b/build/config/fuchsia/test/README.md
@@ -1,31 +1,82 @@
-## CMX Fragments
-
-This directory contains the cmx fragments that are required for running
-Fuchsia tests hermetically. Tests start from `minimum_capabilities.test-cmx`
-and add additional capabilities as necessary by providing the
+## Manifest Fragments
+
+This directory contains the manifest fragments that are required for running
+Fuchsia tests hermetically. Tests start from `minimum.shard.test-cml` and add
+additional capabilities as necessary by providing the
 `additional_manifest_fragments` argument. Some fragments are explained in
 detail below:
 
 ### General Purpose Fragments
 
-#### font_capabilities.test-cmx
-For tests that test fonts by providing `fuchsia.fonts.Provider`.
+#### archivist.shard.test-cml
+Runs an `archivist-without-attribution` instance with custom protocol routing
+for tests that want to intercept events written to a `LogSink` by a component.
 
-#### jit_capabilities.test-cmx
+#### chromium_test_facet.shard.test-cml
+Runs tests in the `chromium` test realm, which is mostly hermetic but has access
+to specific system services that cannot (currently) be faked. For more
+information, see https://fxbug.dev/91934. This is generally required for all
+Chromium tests not using the
+[`chromium_system_test_facet`](#chromium_system_test_facetshardtest-cml).
+
+#### fonts.shard.test-cml
+For tests that test fonts by providing `fuchsia.fonts.Provider`. This shard
+runs an isolated font provider, but serves the fonts present on the system.
+
+#### test_fonts.shard.test-cml
+For tests that use the fonts in `//third_party/test_fonts` by way of
+`//skia:test_fonts_cfv2`.
+
+#### mark_vmo_executable.shard.test-cml
 Required by tests that execute JavaScript. Should only be required in a small
 number of tests.
 
-#### minimum_capabilites.test-cmx
-Capabilities required by anything that uses `//base/test`, used as the base
-fragment for all test suites.
+#### minimum.shard.test-cml
+Capabilities required by anything that uses `//base/test` when running in the
+(default) `chromium` test realm. It is the default base fragment for most
+`test()` Components.
 
-#### read_debug_data.test-cmx
-Required by tests that need access to its debug directory. Should only be
-required in a small number of tests.
+The system-wide `config-data` directory capability is routed to tests running in
+the realm so that individual tests may route subdirectories as needed.
+TODO(crbug.com/1360077): Remove this after migrating to the new mechanism.
 
-#### test_logger_capabilities.test-cmx
+#### logger.shard.test-cml
 For tests that test logging functionality by providing `fuchsia.logger.Log`.
 
+#### sysmem.shard.test-cml
+For tests that depend on the sysmem service (e.g. to allocate image buffers to
+share with Vulkan and Scenic).
+
+#### system_test_minimum.shard.test-cml
+Capabilities required by anything that uses `//base/test` when running as a
+system test in the `chromium-system` test realm. It is the base fragment for
+`test()` Components that use the
+[`chromium_system_test_facet`](#chromium_system_test_facetshardtest-cml).
+
+Most tests use the [`minimum`](#minimumshardtest-cml) shard.
+
+#### chromium_system_test_facet.shard.test-cml
+Runs tests in the `chromium-system` test realm. This is required for Chromium
+tests that are intended to run against the actual system and its real system
+services. This is required, for example, for performance tests intended to
+measure system performance. Another overlapping use case is tests that need to
+be run in environments without access to the packages containing fake
+implementations of required protocols that other tests use.
+(https://crbug.com/1408597 should make that use case obsolete.)
+
+Most tests should use the
+[`chromium_test_facet`](#chromium_test_facetshardtest-cml).
+
+#### test_ui_stack.shard.test-cml
+For tests that need an isolated UI subsystem that supports the Flatland
+API set. This allows tests to run, for example, with view-focus unaffected
+by any other tests running concurrently on the device, as well as providing
+test-only functionality such as input-injection support.
+
+#### gfx_test_ui_stack.shard.test-cml
+For tests that need an isolated display subsystem supporting the legacy
+Scenic/GFX APIs.
+
 ### WebEngine Fragments
 The following fragments are specific to WebEngine functionality as
 documented at
@@ -33,29 +84,29 @@ https://fuchsia.dev/reference/fidl/fuchsia.web#CreateContextParams and
 https://fuchsia.dev/reference/fidl/fuchsia.web#ContextFeatureFlags. Any
 test-specific exceptions are documented for each file.
 
-#### audio_capabilities.test-cmx
-Corresponds to the `AUDIO` flag. Required for enabling audio input and output.
+#### audio_output.shard.test-cml
+Required by tests that need to enable audio output.
+
+#### platform_video_codecs.shard.test-cml
+Required by tests that need accelerated (e.g., hardware) video codecs. A
+private (semi-isolated) instance of codec_factory is run for tests using this
+shard, so that they can run on system images that do not themselves run it.
 
-#### network_capabilities.test-cmx
+#### network.shard.test-cml
+For tests that need access to network services, including those that access a
+local HTTP server.
+
 Corresponds to the `NETWORK` flag. Required for enabling network access. Note
 that access to the root SSL certificates is not needed if ContextProvider is
 used to launch the `Context`. The `fuchsia.device.NameProvider` dependency
 comes from fdio.
 
-#### present_view_capabilities.test-cmx
+#### present_view.shard.test-cml
 Services that are needed to render web content in a Scenic view and present it.
 Most services are required per the FIDL documentation.
-`fuchsia.ui.policy.Presenter` is additionally required by tests that create
-views.
-
-#### vulkan_capabilities.test-cmx
-Corresponds to the `VULKAN` flag. Required for enabling GPU-accelerated
-rendering of the web content.
-
-#### web_engine_required_capabilities.test-cmx
-Contains services that need to be present when creating a
-`fuchsia.web.Context`. Note that the `fuchsia.scheduler.ProfileProvider` service
-is only used in tests that encounter memory pressure code.
-#### web_instance_host_capabilities.test-cmx
-Contains services that need to be present to use `WebInstanceHost`.
\ No newline at end of file
+#### web_instance.shard.test-cml
+Contains services that need to be present when creating a `fuchsia.web.Context`.
+Note that the `fuchsia.scheduler.ProfileProvider` service is only used in tests
+that encounter memory pressure code.
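+
+For reference, a test target selects these fragments through the
+`additional_manifest_fragments` argument mentioned above. A sketch of a
+hypothetical `test()` target (the target and source names are placeholders):
+
+```gn
+test("example_browsertests") {
+  sources = [ "example_browsertest.cc" ]
+  deps = [ "//base/test:test_support" ]
+
+  # Layer extra capabilities on top of the default minimum shard.
+  additional_manifest_fragments = [
+    "//build/config/fuchsia/test/fonts.shard.test-cml",
+    "//build/config/fuchsia/test/network.shard.test-cml",
+  ]
+}
+```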
diff --git a/build/config/fuchsia/test/access_test_data_dir.test-cmx b/build/config/fuchsia/test/access_test_data_dir.test-cmx deleted file mode 100644 index 57577785438b..000000000000 --- a/build/config/fuchsia/test/access_test_data_dir.test-cmx +++ /dev/null @@ -1,7 +0,0 @@ -{ - "sandbox": { - "features": [ - "isolated-cache-storage" - ] - } -} \ No newline at end of file diff --git a/build/config/fuchsia/test/archivist.shard.test-cml b/build/config/fuchsia/test/archivist.shard.test-cml new file mode 100644 index 000000000000..b85162f6d05c --- /dev/null +++ b/build/config/fuchsia/test/archivist.shard.test-cml @@ -0,0 +1,28 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + children: [ + { + name: "isolated_archivist", + url: "fuchsia-pkg://fuchsia.com/archivist-without-attribution#meta/archivist-without-attribution.cm", + }, + ], + use: [ + { + protocol: "fuchsia.logger.Log", + path: "/svc/fuchsia.logger.Log.isolated", + from: "#isolated_archivist", + }, + { + protocol: "fuchsia.logger.LogSink", + path: "/svc/fuchsia.logger.LogSink.isolated", + from: "#isolated_archivist", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "archivist-without-attribution" ], + }, + }, +} diff --git a/build/config/fuchsia/test/audio_capabilities.test-cmx b/build/config/fuchsia/test/audio_capabilities.test-cmx deleted file mode 100644 index 2e2013f9eaf6..000000000000 --- a/build/config/fuchsia/test/audio_capabilities.test-cmx +++ /dev/null @@ -1,18 +0,0 @@ -{ - "facets": { - "fuchsia.test": { - "injected-services": { - "fuchsia.mediacodec.CodecFactory": "fuchsia-pkg://fuchsia.com/codec_factory#meta/codec_factory.cmx" - }, - "system-services": [ - "fuchsia.media.Audio" - ] - } - }, - "sandbox": { - "services": [ - "fuchsia.media.Audio", - "fuchsia.mediacodec.CodecFactory" - ] - } -} \ No newline at end of file diff --git a/build/config/fuchsia/test/audio_output.shard.test-cml b/build/config/fuchsia/test/audio_output.shard.test-cml new file mode 100644 index 000000000000..9176f6cdf436 --- /dev/null +++ b/build/config/fuchsia/test/audio_output.shard.test-cml @@ -0,0 +1,16 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + use: [ + { + protocol: [ + // TODO(crbug.com/1348174): Rather than require the system to provide + // capabilities straight from audio_core, we should run Chromium tests + // against an audio stack with fake device(s). + "fuchsia.media.Audio", + "fuchsia.media.AudioDeviceEnumerator", + ] + }, + ], +} diff --git a/build/config/fuchsia/test/chromium_system_test_facet.shard.test-cml b/build/config/fuchsia/test/chromium_system_test_facet.shard.test-cml new file mode 100644 index 000000000000..cdf9ca7a0b7f --- /dev/null +++ b/build/config/fuchsia/test/chromium_system_test_facet.shard.test-cml @@ -0,0 +1,8 @@ +// Copyright 2023 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+{ + facets: { + "fuchsia.test": { type: "chromium-system" }, + }, +} diff --git a/build/config/fuchsia/test/chromium_test_facet.shard.test-cml b/build/config/fuchsia/test/chromium_test_facet.shard.test-cml new file mode 100644 index 000000000000..3628cf400da2 --- /dev/null +++ b/build/config/fuchsia/test/chromium_test_facet.shard.test-cml @@ -0,0 +1,8 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + facets: { + "fuchsia.test": { type: "chromium" }, + }, +} diff --git a/build/config/fuchsia/test/context_provider.shard.test-cml b/build/config/fuchsia/test/context_provider.shard.test-cml new file mode 100644 index 000000000000..e5db2f1f6fdf --- /dev/null +++ b/build/config/fuchsia/test/context_provider.shard.test-cml @@ -0,0 +1,30 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + children: [ + { + name: "context_provider", + url: "fuchsia-pkg://fuchsia.com/web_engine#meta/context_provider.cm", + }, + ], + use: [ + { + protocol: [ + "fuchsia.web.ContextProvider", + ], + from: "#context_provider", + dependency: "weak", + }, + ], + offer: [ + { + protocol: [ + "fuchsia.feedback.ComponentDataRegister", + "fuchsia.feedback.CrashReportingProductRegister", + ], + from: "parent", + to: "#context_provider", + }, + ], +} diff --git a/build/config/fuchsia/test/elf_test_ambient_exec_runner.shard.test-cml b/build/config/fuchsia/test/elf_test_ambient_exec_runner.shard.test-cml new file mode 100644 index 000000000000..c9328c56f6cf --- /dev/null +++ b/build/config/fuchsia/test/elf_test_ambient_exec_runner.shard.test-cml @@ -0,0 +1,17 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + program: { + runner: "elf_test_ambient_exec_runner", + }, + capabilities: [ + { protocol: "fuchsia.test.Suite" }, + ], + expose: [ + { + protocol: "fuchsia.test.Suite", + from: "self", + }, + ], +} diff --git a/build/config/fuchsia/test/elf_test_runner.shard.test-cml b/build/config/fuchsia/test/elf_test_runner.shard.test-cml new file mode 100644 index 000000000000..c97e6d7c0d33 --- /dev/null +++ b/build/config/fuchsia/test/elf_test_runner.shard.test-cml @@ -0,0 +1,17 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+{ + program: { + runner: "elf_test_runner", + }, + capabilities: [ + { protocol: "fuchsia.test.Suite" }, + ], + expose: [ + { + protocol: "fuchsia.test.Suite", + from: "self", + }, + ], +} diff --git a/build/config/fuchsia/test/font_capabilities.test-cmx b/build/config/fuchsia/test/font_capabilities.test-cmx deleted file mode 100644 index 4c8661bb80a5..000000000000 --- a/build/config/fuchsia/test/font_capabilities.test-cmx +++ /dev/null @@ -1,14 +0,0 @@ -{ - "facets": { - "fuchsia.test": { - "injected-services": { - "fuchsia.fonts.Provider": "fuchsia-pkg://fuchsia.com/fonts#meta/fonts.cmx", - } - } - }, - "sandbox": { - "services": [ - "fuchsia.fonts.Provider" - ] - } -} \ No newline at end of file diff --git a/build/config/fuchsia/test/fonts.shard.test-cml b/build/config/fuchsia/test/fonts.shard.test-cml new file mode 100644 index 000000000000..80fb0cae12be --- /dev/null +++ b/build/config/fuchsia/test/fonts.shard.test-cml @@ -0,0 +1,38 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + children: [ + { + name: "isolated_font_provider", + url: "fuchsia-pkg://fuchsia.com/fonts#meta/fonts.cm", + }, + ], + use: [ + { + protocol: "fuchsia.fonts.Provider", + from: "#isolated_font_provider", + }, + ], + offer: [ + { + directory: "config-data", + from: "parent", + to: "#isolated_font_provider", + subdir: "fonts", + }, + { + protocol: [ + "fuchsia.logger.LogSink", + "fuchsia.tracing.provider.Registry", + ], + from: "parent", + to: "#isolated_font_provider", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "fonts" ], + }, + }, +} diff --git a/build/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml b/build/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml new file mode 100644 index 000000000000..2e51f033fe73 --- /dev/null +++ b/build/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml @@ -0,0 +1,49 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Used in tests which are hard-coded for the Scenic/GFX API-set. +// Use test_ui_stack.shard.test-cml when testing for Flatland, or when the +// choice of API-set is not important. 
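+//
+// A test target opts into this shard from GN via the
+// additional_manifest_fragments argument described in
+// //build/config/fuchsia/test/README.md, e.g. (sketch):
+//
+//   additional_manifest_fragments =
+//       [ "//build/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml" ]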
+{ + include: [ + "//build/config/fuchsia/test/sysmem.shard.test-cml", + ], + children: [ + { + name: "test_ui_stack", + url: "fuchsia-pkg://fuchsia.com/gfx-scene-manager-test-ui-stack#meta/test-ui-stack.cm", + }, + ], + offer: [ + { + protocol: [ + "fuchsia.logger.LogSink", + "fuchsia.scheduler.ProfileProvider", + "fuchsia.sysmem.Allocator", + "fuchsia.tracing.provider.Registry", + "fuchsia.vulkan.loader.Loader", + ], + from: "parent", + to: "#test_ui_stack", + }, + ], + use: [ + { + protocol: [ + "fuchsia.accessibility.semantics.SemanticsManager", + "fuchsia.element.GraphicalPresenter", + "fuchsia.ui.composition.Allocator", + "fuchsia.ui.composition.Flatland", + "fuchsia.ui.input3.Keyboard", + "fuchsia.ui.scenic.Scenic", + ], + from: "#test_ui_stack", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "gfx-scene-manager-test-ui-stack" ], + }, + }, +} diff --git a/build/config/fuchsia/test/jit_capabilities.test-cmx b/build/config/fuchsia/test/jit_capabilities.test-cmx deleted file mode 100644 index ff70e256bea1..000000000000 --- a/build/config/fuchsia/test/jit_capabilities.test-cmx +++ /dev/null @@ -1,7 +0,0 @@ -{ - "sandbox": { - "features": [ - "deprecated-ambient-replace-as-executable" - ] - } -} diff --git a/build/config/fuchsia/test/logger.shard.test-cml b/build/config/fuchsia/test/logger.shard.test-cml new file mode 100644 index 000000000000..be0881dd424f --- /dev/null +++ b/build/config/fuchsia/test/logger.shard.test-cml @@ -0,0 +1,8 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + use: [ + { protocol: [ "fuchsia.logger.Log" ] }, + ], +} diff --git a/build/config/fuchsia/test/mark_vmo_executable.shard.test-cml b/build/config/fuchsia/test/mark_vmo_executable.shard.test-cml new file mode 100644 index 000000000000..ac07c1bde181 --- /dev/null +++ b/build/config/fuchsia/test/mark_vmo_executable.shard.test-cml @@ -0,0 +1,12 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + use: [ + { + protocol: [ + "fuchsia.kernel.VmexResource", + ], + }, + ], +} diff --git a/build/config/fuchsia/test/minimum.shard.test-cml b/build/config/fuchsia/test/minimum.shard.test-cml new file mode 100644 index 000000000000..17b49278ad9b --- /dev/null +++ b/build/config/fuchsia/test/minimum.shard.test-cml @@ -0,0 +1,78 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + include: [ + "syslog/client.shard.cml", + ], + // Add capability providers. 
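+    // The fake-build-info and intl_property_manager children below act as
+    // stand-ins so tests do not rely on the corresponding system services.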
+ children: [ + { + name: "build-info-service", + url: "fuchsia-pkg://fuchsia.com/fake-build-info#meta/fake_build_info.cm", + }, + { + name: "intl_property_manager", + url: "fuchsia-pkg://fuchsia.com/intl_property_manager#meta/intl_property_manager.cm", + }, + ], + offer: [ + { + protocol: "fuchsia.logger.LogSink", + from: "parent", + to: [ "#intl_property_manager" ], + } + ], + use: [ + { + directory: "config-data", + rights: [ "r*" ], + path: "/config/data", + }, + { + storage: "cache", + path: "/cache", + }, + { + storage: "custom_artifacts", + path: "/custom_artifacts", + }, + { + storage: "data", + path: "/data", + }, + { + storage: "tmp", + path: "/tmp", + }, + { + protocol: [ "fuchsia.buildinfo.Provider" ], + from: "#build-info-service", + }, + { + protocol: [ "fuchsia.intl.PropertyProvider" ], + from: "#intl_property_manager", + }, + { + protocol: [ + "fuchsia.hwinfo.Product", + "fuchsia.media.ProfileProvider", + "fuchsia.process.Launcher", + ], + }, + { + protocol: [ + "fuchsia.tracing.perfetto.ProducerConnector", + ], + availability: "optional", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ + "fake-build-info", + "intl_property_manager", + ], + }, + }, +} diff --git a/build/config/fuchsia/test/minimum_capabilities.test-cmx b/build/config/fuchsia/test/minimum_capabilities.test-cmx deleted file mode 100644 index a1d469df1582..000000000000 --- a/build/config/fuchsia/test/minimum_capabilities.test-cmx +++ /dev/null @@ -1,29 +0,0 @@ -{ - "facets": { - "fuchsia.test": { - "injected-services": { - "fuchsia.intl.PropertyProvider": "fuchsia-pkg://fuchsia.com/intl_property_manager#meta/intl_property_manager.cmx" - }, - "system-services": [ - "fuchsia.boot.ReadOnlyLog" - ] - } - }, - "sandbox": { - "dev": [ - "null", - "zero" - ], - "features": [ - "isolated-persistent-storage", - "isolated-temp" - ], - "services": [ - "fuchsia.intl.PropertyProvider", - "fuchsia.logger.LogSink", - "fuchsia.process.Launcher", - "fuchsia.sys.Launcher", - "fuchsia.sys.Loader" - ] - } -} diff --git a/build/config/fuchsia/test/network.shard.test-cml b/build/config/fuchsia/test/network.shard.test-cml new file mode 100644 index 000000000000..1fd4fa7cf4ac --- /dev/null +++ b/build/config/fuchsia/test/network.shard.test-cml @@ -0,0 +1,20 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + use: [ + { + directory: "root-ssl-certificates", + rights: [ "r*" ], + path: "/config/ssl", + }, + { + protocol: [ + "fuchsia.device.NameProvider", // Required by FDIO. 
+ "fuchsia.net.interfaces.State", + "fuchsia.net.name.Lookup", + "fuchsia.posix.socket.Provider", + ], + }, + ], +} diff --git a/build/config/fuchsia/test/network_capabilities.test-cmx b/build/config/fuchsia/test/network_capabilities.test-cmx deleted file mode 100644 index dfeb131a48ac..000000000000 --- a/build/config/fuchsia/test/network_capabilities.test-cmx +++ /dev/null @@ -1,25 +0,0 @@ -{ - "facets": { - "fuchsia.test": { - "injected-services": { - "fuchsia.net.NameLookup": "fuchsia-pkg://fuchsia.com/dns-resolver#meta/dns-resolver.cmx", - "fuchsia.net.interfaces.State": "fuchsia-pkg://fuchsia.com/netstack#meta/netstack.cmx", - "fuchsia.posix.socket.Provider": "fuchsia-pkg://fuchsia.com/netstack#meta/netstack.cmx" - }, - "system-services": [ - "fuchsia.device.NameProvider" - ], - }, - }, - "sandbox": { - "features": [ - "root-ssl-certificates" - ], - "services": [ - "fuchsia.device.NameProvider", - "fuchsia.net.NameLookup", - "fuchsia.net.interfaces.State", - "fuchsia.posix.socket.Provider" - ] - } -} \ No newline at end of file diff --git a/build/config/fuchsia/test/platform_video_codecs.shard.test-cml b/build/config/fuchsia/test/platform_video_codecs.shard.test-cml new file mode 100644 index 000000000000..13b5a1b7947f --- /dev/null +++ b/build/config/fuchsia/test/platform_video_codecs.shard.test-cml @@ -0,0 +1,48 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + include: [ + "//build/config/fuchsia/test/sysmem.shard.test-cml", + ], + children: [ + { + // Run an isolated instance of codec_factory so that tests can run on + // system images that don't run it. + name: "isolated_codec_factory", + url: "fuchsia-pkg://fuchsia.com/codec_factory#meta/codec_factory.cm", + }, + ], + offer: [ + { + protocol: [ + "fuchsia.logger.LogSink", + "fuchsia.sysinfo.SysInfo", + "fuchsia.sysmem.Allocator", + ], + from: "parent", + to: "#isolated_codec_factory", + }, + { + directory: "dev-mediacodec", + from: "parent", + to: "#isolated_codec_factory", + }, + { + directory: "dev-gpu", + from: "parent", + to: "#isolated_codec_factory", + }, + ], + use: [ + { + protocol: "fuchsia.mediacodec.CodecFactory", + from: "#isolated_codec_factory", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "codec_factory" ], + }, + }, +} diff --git a/build/config/fuchsia/test/present_view.shard.test-cml b/build/config/fuchsia/test/present_view.shard.test-cml new file mode 100644 index 000000000000..4e15ad50b4b2 --- /dev/null +++ b/build/config/fuchsia/test/present_view.shard.test-cml @@ -0,0 +1,42 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+{ + children: [ + { + name: "isolated_a11y_manager", + url: "fuchsia-pkg://fuchsia.com/a11y-manager#meta/a11y-manager.cm", + }, + { + name: "isolated_text_manager", + url: "fuchsia-pkg://fuchsia.com/text_manager#meta/text_manager.cm", + }, + ], + offer: [ + { + protocol: "fuchsia.logger.LogSink", + from: "parent", + to: [ + "#isolated_a11y_manager", + "#isolated_text_manager", + ], + }, + ], + use: [ + { + protocol: [ + "fuchsia.ui.composition.Allocator", + "fuchsia.ui.composition.Flatland", + "fuchsia.ui.scenic.Scenic", + ], + }, + { + protocol: "fuchsia.accessibility.semantics.SemanticsManager", + from: "#isolated_a11y_manager", + }, + { + protocol: "fuchsia.ui.input3.Keyboard", + from: "#isolated_text_manager", + }, + ], +} diff --git a/build/config/fuchsia/test/present_view_capabilities.test-cmx b/build/config/fuchsia/test/present_view_capabilities.test-cmx deleted file mode 100644 index 201c8b2212ad..000000000000 --- a/build/config/fuchsia/test/present_view_capabilities.test-cmx +++ /dev/null @@ -1,24 +0,0 @@ -{ - "facets": { - "fuchsia.test": { - "injected-services": { - "fuchsia.accessibility.semantics.SemanticsManager": "fuchsia-pkg://fuchsia.com/a11y-manager#meta/a11y-manager.cmx", - "fuchsia.ui.input3.Keyboard": "fuchsia-pkg://fuchsia.com/ime_service#meta/ime_service.cmx", - }, - "system-services": [ - "fuchsia.sysmem.Allocator", - "fuchsia.ui.policy.Presenter", - "fuchsia.ui.scenic.Scenic" - ] - } - }, - "sandbox": { - "services": [ - "fuchsia.accessibility.semantics.SemanticsManager", - "fuchsia.sysmem.Allocator", - "fuchsia.ui.input3.Keyboard", - "fuchsia.ui.policy.Presenter", - "fuchsia.ui.scenic.Scenic" - ] - } -} diff --git a/build/config/fuchsia/test/read_debug_data.test-cmx b/build/config/fuchsia/test/read_debug_data.test-cmx deleted file mode 100644 index b0c95b088304..000000000000 --- a/build/config/fuchsia/test/read_debug_data.test-cmx +++ /dev/null @@ -1,7 +0,0 @@ -{ - "sandbox": { - "features": [ - "hub" - ] - } -} \ No newline at end of file diff --git a/build/config/fuchsia/test/sysmem.shard.test-cml b/build/config/fuchsia/test/sysmem.shard.test-cml new file mode 100644 index 000000000000..8bebd998b956 --- /dev/null +++ b/build/config/fuchsia/test/sysmem.shard.test-cml @@ -0,0 +1,10 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + use: [ + { + protocol: "fuchsia.sysmem.Allocator", + }, + ], +} diff --git a/build/config/fuchsia/test/system_test_minimum.shard.test-cml b/build/config/fuchsia/test/system_test_minimum.shard.test-cml new file mode 100644 index 000000000000..6efde20f7708 --- /dev/null +++ b/build/config/fuchsia/test/system_test_minimum.shard.test-cml @@ -0,0 +1,46 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+{ + include: [ + "syslog/client.shard.cml", + ], + use: [ + { + directory: "config-data", + rights: [ "r*" ], + path: "/config/data", + }, + { + storage: "cache", + path: "/cache", + }, + { + storage: "custom_artifacts", + path: "/custom_artifacts", + }, + { + storage: "data", + path: "/data", + }, + { + storage: "tmp", + path: "/tmp", + }, + { + protocol: [ + "fuchsia.buildinfo.Provider", + "fuchsia.hwinfo.Product", + "fuchsia.intl.PropertyProvider", + "fuchsia.media.ProfileProvider", + "fuchsia.process.Launcher", + ], + }, + { + protocol: [ + "fuchsia.tracing.perfetto.ProducerConnector", + ], + availability: "optional", + }, + ], +} diff --git a/build/config/fuchsia/test/test_fonts.shard.test-cml b/build/config/fuchsia/test/test_fonts.shard.test-cml new file mode 100644 index 000000000000..6610e31a2c5f --- /dev/null +++ b/build/config/fuchsia/test/test_fonts.shard.test-cml @@ -0,0 +1,37 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + children: [ + { + name: "test_fonts", + url: "fuchsia-pkg://fuchsia.com/fonts#meta/fonts.cm", + }, + ], + offer: [ + { + protocol: "fuchsia.logger.LogSink", + from: "parent", + to: "#test_fonts", + }, + { + directory: "pkg", + subdir: "test_fonts", + from: "framework", + to: "#test_fonts", + as: "config-data", + rights: [ "r*" ], + } + ], + use: [ + { + protocol: "fuchsia.fonts.Provider", + from: "#test_fonts", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "fonts" ], + }, + }, +} diff --git a/build/config/fuchsia/test/test_logger_capabilities.test-cmx b/build/config/fuchsia/test/test_logger_capabilities.test-cmx deleted file mode 100644 index 68b2a67012af..000000000000 --- a/build/config/fuchsia/test/test_logger_capabilities.test-cmx +++ /dev/null @@ -1,7 +0,0 @@ -{ - "sandbox": { - "services": [ - "fuchsia.logger.Log" - ] - } -} \ No newline at end of file diff --git a/build/config/fuchsia/test/test_ui_stack.shard.test-cml b/build/config/fuchsia/test/test_ui_stack.shard.test-cml new file mode 100644 index 000000000000..102867cf1ae1 --- /dev/null +++ b/build/config/fuchsia/test/test_ui_stack.shard.test-cml @@ -0,0 +1,48 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+{ + include: [ "//build/config/fuchsia/test/sysmem.shard.test-cml" ], + children: [ + { + name: "test_ui_stack", + url: "fuchsia-pkg://fuchsia.com/flatland-scene-manager-test-ui-stack#meta/test-ui-stack.cm", + }, + ], + use: [ + { + protocol: [ + "fuchsia.accessibility.semantics.SemanticsManager", + "fuchsia.element.GraphicalPresenter", + "fuchsia.ui.composition.Allocator", + "fuchsia.ui.composition.Flatland", + "fuchsia.ui.input3.Keyboard", + "fuchsia.ui.scenic.Scenic", + ], + from: "#test_ui_stack", + }, + ], + offer: [ + { + storage: "tmp", + from: "parent", + to: "#test_ui_stack", + }, + { + protocol: [ + "fuchsia.logger.LogSink", + "fuchsia.scheduler.ProfileProvider", + "fuchsia.sysmem.Allocator", + "fuchsia.tracing.provider.Registry", + "fuchsia.vulkan.loader.Loader", + ], + from: "parent", + to: "#test_ui_stack", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "flatland-scene-manager-test-ui-stack" ], + }, + }, +} diff --git a/build/config/fuchsia/test/vulkan_capabilities.test-cmx b/build/config/fuchsia/test/vulkan_capabilities.test-cmx deleted file mode 100644 index 0436ffd5e3c3..000000000000 --- a/build/config/fuchsia/test/vulkan_capabilities.test-cmx +++ /dev/null @@ -1,19 +0,0 @@ -{ - "facets": { - "fuchsia.test": { - "system-services": [ - "fuchsia.sysmem.Allocator", - "fuchsia.vulkan.loader.Loader" - ] - } - }, - "sandbox": { - "features": [ - "vulkan" - ], - "services": [ - "fuchsia.sysmem.Allocator", - "fuchsia.vulkan.loader.Loader" - ] - } -} \ No newline at end of file diff --git a/build/config/fuchsia/test/web_engine_required_capabilities.test-cmx b/build/config/fuchsia/test/web_engine_required_capabilities.test-cmx deleted file mode 100644 index 4cb61fe7f1bd..000000000000 --- a/build/config/fuchsia/test/web_engine_required_capabilities.test-cmx +++ /dev/null @@ -1,25 +0,0 @@ -{ - "facets": { - "fuchsia.test": { - "injected-services": { - "fuchsia.fonts.Provider": "fuchsia-pkg://fuchsia.com/fonts#meta/fonts.cmx", - "fuchsia.memorypressure.Provider": "fuchsia-pkg://fuchsia.com/memory_monitor#meta/memory_monitor.cmx", - "fuchsia.web.ContextProvider": "fuchsia-pkg://fuchsia.com/web_engine#meta/context_provider.cmx", - }, - "system-services": [ - "fuchsia.device.NameProvider", - "fuchsia.scheduler.ProfileProvider", - "fuchsia.sysmem.Allocator" - ] - } - }, - "sandbox": { - "services": [ - "fuchsia.device.NameProvider", - "fuchsia.fonts.Provider", - "fuchsia.memorypressure.Provider", - "fuchsia.sysmem.Allocator", - "fuchsia.web.ContextProvider" - ] - } -} \ No newline at end of file diff --git a/build/config/fuchsia/test/web_instance.shard.test-cml b/build/config/fuchsia/test/web_instance.shard.test-cml new file mode 100644 index 000000000000..b996f4ab6826 --- /dev/null +++ b/build/config/fuchsia/test/web_instance.shard.test-cml @@ -0,0 +1,21 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+{
+    include: [
+        "//build/config/fuchsia/test/audio_output.shard.test-cml",
+        "//build/config/fuchsia/test/fonts.shard.test-cml",
+        "//build/config/fuchsia/test/mark_vmo_executable.shard.test-cml",
+        "//build/config/fuchsia/test/network.shard.test-cml",
+        "//build/config/fuchsia/test/platform_video_codecs.shard.test-cml",
+        "//build/config/fuchsia/test/test_ui_stack.shard.test-cml",
+        "vulkan/client.shard.cml",
+    ],
+    use: [
+        {
+            protocol: [
+                "fuchsia.memorypressure.Provider",
+            ],
+        },
+    ],
+}
diff --git a/build/config/gcc/BUILD.gn b/build/config/gcc/BUILD.gn
index 154b259b5fae..147ebfc53426 100644
--- a/build/config/gcc/BUILD.gn
+++ b/build/config/gcc/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -63,8 +63,9 @@ config("symbol_visibility_default") {
 #     configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
 #   }
 config("rpath_for_built_shared_libraries") {
-  if (!is_android) {
-    # Note: Android doesn't support rpath.
+  if (!is_android && current_os != "aix" && !is_castos) {
+    # Note: Android and AIX don't support rpath. Chromecast has its own logic
+    # for setting the rpath in //build/config/chromecast.
     if (current_toolchain != default_toolchain || gcc_target_rpath == "") {
       ldflags = [
         # Want to pass "\$". GN will re-escape as required for ninja.
diff --git a/build/config/get_host_byteorder.py b/build/config/get_host_byteorder.py
index fc01d85718ed..7cc0cdff804f 100755
--- a/build/config/get_host_byteorder.py
+++ b/build/config/get_host_byteorder.py
@@ -1,11 +1,10 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Get Byteorder of host architecture"""
 
-from __future__ import print_function
 
 import sys
diff --git a/build/config/host_byteorder.gni b/build/config/host_byteorder.gni
index 48a1a7f1e380..1c3c72dd6a24 100644
--- a/build/config/host_byteorder.gni
+++ b/build/config/host_byteorder.gni
@@ -1,4 +1,4 @@
-# Copyright (c) 2017 The Chromium Authors. All rights reserved.
+# Copyright 2017 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
diff --git a/build/config/ios/BUILD.gn b/build/config/ios/BUILD.gn
index c4cd317b4858..863d1d0ec75f 100644
--- a/build/config/ios/BUILD.gn
+++ b/build/config/ios/BUILD.gn
@@ -1,29 +1,14 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import("//build/config/ios/ios_sdk.gni")
+import("//build/toolchain/apple/toolchain.gni")
 import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
 import("//build/toolchain/toolchain.gni")
 import("//build_overrides/build.gni")
 
-declare_args() {
-  # Enabling this option makes clang compile to an intermediate
-  # representation ("bitcode"), and not to native code. This is preferred
-  # when including WebRTC in the apps that will be sent to Apple's App Store
-  # and mandatory for the apps that run on watchOS or tvOS.
-  # The option only works when building with Xcode (use_xcode_clang = true).
-  # Mimicking how Xcode handles it, the production builds (is_debug = false)
-  # get real bitcode sections added, while the debug builds (is_debug = true)
-  # only get bitcode-section "markers" added in them.
-  # NOTE: This option is ignored when building versions for the iOS simulator,
-  # where a part of libvpx is compiled from the assembly code written using
-  # Intel assembly syntax; Yasm / Nasm do not support emitting bitcode parts.
-  # That is not a limitation for now as Xcode mandates the presence of bitcode
-  # only when building bitcode-enabled projects for real devices (ARM CPUs).
-  enable_ios_bitcode = false
-}
-
 # This is included by reference in the //build/config/compiler config that
 # is applied to all targets. It is here to separate out the logic.
 config("compiler") {
@@ -77,9 +62,22 @@ config("compiler") {
       "5",
     ]
 
-  # Without this, the constructors and destructors of a C++ object inside
-  # an Objective C struct won't be called, which is very bad.
-  cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
+  cflags_objcc = [
+    # Without this, the constructors and destructors of a C++ object inside
+    # an Objective C struct won't be called, which is very bad.
+    "-fobjc-call-cxx-cdtors",
+
+    # When using -std=c++20 or higher, clang automatically returns true for
+    # `__has_feature(modules)` as it enables cxx modules. This is problematic
+    # because Objective-C code uses this to detect whether `@import` can be
+    # used (this feature is also named modules).
+    #
+    # Since Chromium does not yet enable cxx modules or clang modules,
+    # force-disable cxx modules, which causes `__has_feature(modules)` to
+    # return false unless clang modules are explicitly enabled.
+    "-Xclang",
+    "-fno-cxx-modules",
+  ]
 
   ldflags = common_flags
 }
@@ -94,7 +92,7 @@ config("runtime_library") {
   # Rebase the value in that case since gn does not convert paths in compiler
   # flags (since it is not aware they are paths).
   _sdk_root = ios_sdk_path
-  if (use_system_xcode && use_goma) {
+  if (use_system_xcode && (use_goma || use_remoteexec)) {
     _sdk_root = rebase_path(ios_sdk_path, root_build_dir)
   }
 
@@ -114,14 +112,13 @@ config("runtime_library") {
       "-iframework",
       "$_sdk_root/System/iOSSupport/System/Library/Frameworks",
     ]
-  }
 
-  if (use_xcode_clang && enable_ios_bitcode && target_environment == "device") {
-    if (is_debug) {
-      common_flags += [ "-fembed-bitcode-marker" ]
-    } else {
-      common_flags += [ "-fembed-bitcode" ]
-    }
+    swiftflags += [
+      "-isystem",
+      "$_sdk_root/System/iOSSupport/usr/include",
+      "-Fsystem",
+      "$_sdk_root/System/iOSSupport/System/Library/Frameworks",
+    ]
   }
 
   asmflags = common_flags
@@ -163,30 +160,45 @@ config("ios_dynamic_flags") {
   ldflags = [
     # Always load Objective-C categories and class.
     "-Wl,-ObjC",
-
-    # Uses version 2 of Objective-C ABI.
-    "-Wl,-objc_abi_version,2",
   ]
 
   # The path to the Swift compatibility libraries (required to run code built
   # with version N of the SDK on older version of the OS) is relative to the
-  # toolchains directory and changes with the environment.
-  _swift_compatibility_libs_dir_prefix = "$ios_toolchains_path/usr/lib/swift"
+  # toolchains directory and changes with the environment when using the
+  # system toolchain. When using the hermetic swift toolchain instead, those
+  # libraries are relative to $swift_toolchain_path.
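+  # For example (illustrative): with the system toolchain on a device build,
+  # the compatibility libraries resolve under
+  # "$ios_toolchains_path/usr/lib/swift/iphoneos".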
+ if (swift_toolchain_path == "") { + _swift_compatibility_libs_prefix = ios_toolchains_path + } else { + _swift_compatibility_libs_prefix = swift_toolchain_path + } + if (target_environment == "simulator") { - _swift_compatibility_libs_dir = - "$_swift_compatibility_libs_dir_prefix/iphonesimulator" + _swift_compatibility_libs_suffix = "iphonesimulator" } else if (target_environment == "device") { - _swift_compatibility_libs_dir = - "$_swift_compatibility_libs_dir_prefix/iphoneos" + _swift_compatibility_libs_suffix = "iphoneos" } else if (target_environment == "catalyst") { - _swift_compatibility_libs_dir = - "$_swift_compatibility_libs_dir_prefix/maccatalyst" + # The Swift compatibility libraries have changed location starting with + # Xcode 13.0, so check the version of Xcode when deciding which path to + # use. + if (xcode_version_int >= 1300) { + _swift_compatibility_libs_suffix = "macosx" + } else { + _swift_compatibility_libs_suffix = "maccatalyst" + } } lib_dirs = [ "$ios_sdk_path/usr/lib/swift", - _swift_compatibility_libs_dir, + "$_swift_compatibility_libs_prefix/usr/lib/swift/" + + "$_swift_compatibility_libs_suffix", ] + + # When building for catalyst, some Swift support libraries are in a + # different directory which needs to be added to the search path. + if (target_environment == "catalyst") { + lib_dirs += [ "$ios_sdk_path/System/iOSSupport/usr/lib/swift" ] + } } config("ios_shared_library_flags") { @@ -196,20 +208,33 @@ config("ios_shared_library_flags") { ] } -config("disable_implicit_retain_self_warning") { - cflags_objc = [ "-Wno-implicit-retain-self" ] - cflags_objcc = cflags_objc -} - config("xctest_config") { - framework_dirs = [ "$ios_sdk_platform_path/Developer/Library/Frameworks" ] + # Add some directories to the system framework search path to make + # them available to the compiler while silencing warnings in the + # framework headers. This is required for XCTest. + common_flags = [ + "-iframework", + rebase_path("$ios_sdk_platform_path/Developer/Library/Frameworks", + root_build_dir), + "-iframework", + rebase_path("$ios_sdk_path/Developer/Library/Frameworks", root_build_dir), + ] + cflags = common_flags + ldflags = common_flags + swiftflags = common_flags + include_dirs = [ "$ios_sdk_platform_path/Developer/usr/lib" ] + lib_dirs = [ "$ios_sdk_platform_path/Developer/usr/lib" ] frameworks = [ "Foundation.framework", "XCTest.framework", ] } +config("enable_swift_cxx_interop") { + swiftflags = [ "-enable-experimental-cxx-interop" ] +} + group("xctest") { public_configs = [ ":xctest_config" ] } @@ -229,7 +254,7 @@ _xctrunner_path = # # To workaround this, add a target that pretends to create those files # (but does nothing). See https://crbug.com/1061487 for why this is needed. -if (use_system_xcode && use_goma) { +if (use_system_xcode && (use_goma || use_remoteexec)) { action("copy_xctrunner_app") { testonly = true script = "//build/noop.py" @@ -258,7 +283,10 @@ action("xctest_runner_without_arm64e") { xcode_version, ] - if (use_system_xcode && use_goma) { - deps = [ ":copy_xctrunner_app" ] + # When running under ASan, the ASan runtime library must be packaged alongside + # the test runner binary. 
+ deps = [ "//build/config/sanitizers:deps" ] + if (use_system_xcode && (use_goma || use_remoteexec)) { + deps += [ ":copy_xctrunner_app" ] } } diff --git a/build/config/ios/Host-Info.plist b/build/config/ios/Host-Info.plist index 9f6f5deef9e4..6898c15fa1c5 100644 --- a/build/config/ios/Host-Info.plist +++ b/build/config/ios/Host-Info.plist @@ -9,7 +9,7 @@ CFBundleExecutable ${EXECUTABLE_NAME} CFBundleIdentifier - ${IOS_BUNDLE_ID_PREFIX}.test.${EXECUTABLE_NAME:rfc1034identifier} + ${BUNDLE_IDENTIFIER} CFBundleInfoDictionaryVersion 6.0 CFBundleName diff --git a/build/config/ios/Module-Info.plist b/build/config/ios/Module-Info.plist index d1bf77faf080..e1b09841541a 100644 --- a/build/config/ios/Module-Info.plist +++ b/build/config/ios/Module-Info.plist @@ -7,7 +7,7 @@ CFBundleExecutable ${EXECUTABLE_NAME} CFBundleIdentifier - ${IOS_BUNDLE_ID_PREFIX}.${MODULE_BUNDLE_ID:rfc1034identifier} + ${BUNDLE_IDENTIFIER} CFBundleInfoDictionaryVersion 6.0 CFBundleName diff --git a/build/config/ios/asset_catalog.gni b/build/config/ios/asset_catalog.gni index 84dd92cce209..8695bf7f9e3e 100644 --- a/build/config/ios/asset_catalog.gni +++ b/build/config/ios/asset_catalog.gni @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -11,7 +11,7 @@ import("//build/config/ios/ios_sdk.gni") # The create_bundle target requires that all asset catalogs are part of an # .xcasset bundle. This requirement comes from actool that only receives # the path to the .xcasset bundle directory and not to the individual -# .imageset directories. +# assets directories. # # The requirement is a bit problematic as it prevents compiling only a # subset of the asset catakig that are contained in a .xcasset. This template @@ -48,69 +48,63 @@ template("asset_catalog") { assert(defined(invoker.asset_type) && invoker.asset_type != "", "asset_type must be defined and not empty for $target_name") - if (is_fat_secondary_toolchain) { - group(target_name) { - public_deps = [ ":$target_name($primary_fat_toolchain_name)" ] - } - } else { - _copy_target_name = target_name + "__copy" - _data_target_name = target_name + _copy_target_name = target_name + "__copy" + _data_target_name = target_name - _sources = invoker.sources - _outputs = [] + _sources = invoker.sources + _outputs = [] - # The compilation of resources into Assets.car is enabled automatically - # by the "create_bundle" target if any of the "bundle_data" sources's - # path is in a .xcassets directory and matches one of the know asset - # catalog type. - _xcassets_dir = "$target_gen_dir/${target_name}.xcassets" - _output_dir = "$_xcassets_dir/" + - get_path_info(get_path_info(_sources[0], "dir"), "file") + # The compilation of resources into Assets.car is enabled automatically + # by the "create_bundle" target if any of the "bundle_data" sources's + # path is in a .xcassets directory and matches one of the know asset + # catalog type. 
+ _xcassets_dir = "$target_gen_dir/${target_name}.xcassets" + _output_dir = "$_xcassets_dir/" + + get_path_info(get_path_info(_sources[0], "dir"), "file") - foreach(_source, invoker.sources) { - _dir = get_path_info(_source, "dir") - _outputs += [ "$_output_dir/" + get_path_info(_source, "file") ] + foreach(_source, invoker.sources) { + _dir = get_path_info(_source, "dir") + _outputs += [ "$_output_dir/" + get_path_info(_source, "file") ] - assert(get_path_info(_dir, "extension") == invoker.asset_type, - "$_source dirname must have .${invoker.asset_type} extension") - } + assert(get_path_info(_dir, "extension") == invoker.asset_type, + "$_source dirname must have .${invoker.asset_type} extension") + } - action(_copy_target_name) { - # Forward "deps", "public_deps" and "testonly" in case some of the - # source files are generated. - forward_variables_from(invoker, - [ - "deps", - "public_deps", - "testonly", - ]) + action(_copy_target_name) { + # Forward "deps", "public_deps" and "testonly" in case some of the + # source files are generated. + forward_variables_from(invoker, + [ + "deps", + "public_deps", + "testonly", + ]) - script = "//build/config/ios/hardlink.py" + script = "//build/config/ios/hardlink.py" - visibility = [ ":$_data_target_name" ] - sources = _sources - outputs = _outputs + [ _xcassets_dir ] + visibility = [ ":$_data_target_name" ] + sources = _sources + outputs = _outputs + [ _xcassets_dir ] - args = [ - rebase_path(get_path_info(_sources[0], "dir"), root_build_dir), - rebase_path(_output_dir, root_build_dir), - ] - } + args = [ + rebase_path(get_path_info(_sources[0], "dir"), root_build_dir), + rebase_path(_output_dir, root_build_dir), + ] + } - bundle_data(_data_target_name) { - forward_variables_from(invoker, - "*", - [ - "deps", - "outputs", - "public_deps", - "sources", - ]) + bundle_data(_data_target_name) { + forward_variables_from(invoker, + "*", + [ + "deps", + "outputs", + "public_deps", + "sources", + ]) - sources = _outputs - outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] - public_deps = [ ":$_copy_target_name" ] - } + sources = _outputs + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + public_deps = [ ":$_copy_target_name" ] } } @@ -148,3 +142,9 @@ template("launchimage") { asset_type = "launchimage" } } +template("symbolset") { + asset_catalog(target_name) { + forward_variables_from(invoker, "*", [ "asset_type" ]) + asset_type = "symbolset" + } +} diff --git a/build/config/ios/bundle_data_from_filelist.gni b/build/config/ios/bundle_data_from_filelist.gni new file mode 100644 index 000000000000..763dc8673620 --- /dev/null +++ b/build/config/ios/bundle_data_from_filelist.gni @@ -0,0 +1,24 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
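+
+# Usage sketch for the template defined below (the target and filelist names
+# are hypothetical; "#"-prefixed lines in the filelist are treated as comments
+# and excluded):
+#
+#   bundle_data_from_filelist("unit_tests_bundle_data") {
+#     testonly = true
+#     filelist_name = "unit_tests_bundle_data.filelist"
+#   }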
+ +assert(current_os == "ios") + +template("bundle_data_from_filelist") { + assert(defined(invoker.filelist_name), "Requires setting filelist_name") + + _filelist_content = read_file(invoker.filelist_name, "list lines") + bundle_data(target_name) { + forward_variables_from(invoker, + "*", + [ + "filelist_name", + "sources", + ]) + sources = filter_exclude(_filelist_content, [ "#*" ]) + if (!defined(outputs)) { + outputs = [ "{{bundle_resources_dir}}/" + + "{{source_root_relative_dir}}/{{source_file_part}}" ] + } + } +} diff --git a/build/config/ios/codesign.py b/build/config/ios/codesign.py index 15d25a78df5d..fd96f312d658 100644 --- a/build/config/ios/codesign.py +++ b/build/config/ios/codesign.py @@ -1,8 +1,7 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import argparse import codecs @@ -14,6 +13,7 @@ import plistlib import shutil import subprocess +import stat import sys import tempfile @@ -226,6 +226,8 @@ def ValidToSignBundle(self, bundle_identifier): def Install(self, installation_path): """Copies mobile provisioning profile info to |installation_path|.""" shutil.copy2(self.path, installation_path) + st = os.stat(installation_path) + os.chmod(installation_path, st.st_mode | stat.S_IWUSR) class Entitlements(object): @@ -273,7 +275,8 @@ def WriteTo(self, target_path): plistlib.dump(self._data, fp) -def FindProvisioningProfile(bundle_identifier, required): +def FindProvisioningProfile(provisioning_profile_paths, bundle_identifier, + required): """Finds mobile provisioning profile to use to sign bundle. Args: @@ -283,8 +286,9 @@ def FindProvisioningProfile(bundle_identifier, required): The ProvisioningProfile object that can be used to sign the Bundle object or None if no matching provisioning profile was found. """ - provisioning_profile_paths = glob.glob( - os.path.join(GetProvisioningProfilesDir(), '*.mobileprovision')) + if not provisioning_profile_paths: + provisioning_profile_paths = glob.glob( + os.path.join(GetProvisioningProfilesDir(), '*.mobileprovision')) # Iterate over all installed mobile provisioning profiles and filter those # that can be used to sign the bundle, ignoring expired ones. @@ -304,8 +308,8 @@ def FindProvisioningProfile(bundle_identifier, required): if not valid_provisioning_profiles: if required: sys.stderr.write( - 'Error: no mobile provisioning profile found for "%s".\n' % - bundle_identifier) + 'Error: no mobile provisioning profile found for "%s" in %s.\n' % + (bundle_identifier, provisioning_profile_paths)) sys.exit(1) return None @@ -401,7 +405,7 @@ def GenerateBundleInfoPlist(bundle, plist_compiler, partial_plist): # Invoke the plist_compiler script. It needs to be a python script. subprocess.check_call([ - 'python', + 'python3', plist_compiler, 'merge', '-f', @@ -459,6 +463,14 @@ def _Register(parser): parser.add_argument( '--plist-compiler-path', '-P', action='store', help='path to the plist compiler script (for --partial-info-plist)') + parser.add_argument( + '--mobileprovision', + '-m', + action='append', + default=[], + dest='mobileprovision_files', + help='list of mobileprovision files to use. If empty, uses the files ' + + 'in $HOME/Library/MobileDevice/Provisioning Profiles') parser.set_defaults(no_signature=False) @staticmethod @@ -554,7 +566,8 @@ def _Execute(args): # provisioning is found). 
provisioning_profile_required = args.identity != '-' provisioning_profile = FindProvisioningProfile( - bundle.identifier, provisioning_profile_required) + args.mobileprovision_files, bundle.identifier, + provisioning_profile_required) if provisioning_profile and args.platform != 'iphonesimulator': provisioning_profile.Install(embedded_provisioning_profile) @@ -629,12 +642,21 @@ def _Register(parser): parser.add_argument( '--info-plist', '-p', required=True, help='path to the bundle Info.plist') + parser.add_argument( + '--mobileprovision', + '-m', + action='append', + default=[], + dest='mobileprovision_files', + help='set of mobileprovision files to use. If empty, uses the files ' + + 'in $HOME/Library/MobileDevice/Provisioning Profiles') @staticmethod def _Execute(args): info_plist = LoadPlistFile(args.info_plist) bundle_identifier = info_plist['CFBundleIdentifier'] - provisioning_profile = FindProvisioningProfile(bundle_identifier, False) + provisioning_profile = FindProvisioningProfile(args.mobileprovision_files, + bundle_identifier, False) entitlements = GenerateEntitlements( args.entitlements_path, provisioning_profile, bundle_identifier) entitlements.WriteTo(args.path) @@ -652,11 +674,20 @@ def _Register(parser): '-b', required=True, help='bundle identifier') + parser.add_argument( + '--mobileprovision', + '-m', + action='append', + default=[], + dest='mobileprovision_files', + help='set of mobileprovision files to use. If empty, uses the files ' + + 'in $HOME/Library/MobileDevice/Provisioning Profiles') @staticmethod def _Execute(args): provisioning_profile_info = {} - provisioning_profile = FindProvisioningProfile(args.bundle_id, False) + provisioning_profile = FindProvisioningProfile(args.mobileprovision_files, + args.bundle_id, False) for key in ('team_identifier', 'name'): if provisioning_profile: provisioning_profile_info[key] = getattr(provisioning_profile, key) diff --git a/build/config/ios/compile_ib_files.py b/build/config/ios/compile_ib_files.py index 84781c177374..e42001601278 100644 --- a/build/config/ios/compile_ib_files.py +++ b/build/config/ios/compile_ib_files.py @@ -1,8 +1,7 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import argparse import logging diff --git a/build/config/ios/compile_xcassets_unittests.py b/build/config/ios/compile_xcassets_unittests.py index 7655df8c0570..8537e4ec2567 100644 --- a/build/config/ios/compile_xcassets_unittests.py +++ b/build/config/ios/compile_xcassets_unittests.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/ios/config.gni b/build/config/ios/config.gni index b25ecd942d21..c5c10c3f7e5c 100644 --- a/build/config/ios/config.gni +++ b/build/config/ios/config.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/ios/dummy.py b/build/config/ios/dummy.py index b23b7dab965c..e88c7888e226 100644 --- a/build/config/ios/dummy.py +++ b/build/config/ios/dummy.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. 
+# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/ios/find_signing_identity.py b/build/config/ios/find_signing_identity.py index d508e2bb4630..37b3284e9792 100644 --- a/build/config/ios/find_signing_identity.py +++ b/build/config/ios/find_signing_identity.py @@ -1,8 +1,7 @@ -# Copyright (c) 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import argparse import os @@ -45,7 +44,8 @@ def ListIdentities(): def FindValidIdentity(pattern): """Find all identities matching the pattern.""" lines = list(l.strip() for l in ListIdentities().splitlines()) - # Look for something like "2) XYZ "iPhone Developer: Name (ABC)"" + # Look for something like + # 1) 123ABC123ABC123ABC****** "iPhone Developer: DeveloperName (Team)" regex = re.compile('[0-9]+\) ([A-F0-9]+) "([^"(]*) \(([^)"]*)\)"') result = [] @@ -53,8 +53,9 @@ def FindValidIdentity(pattern): res = regex.match(line) if res is None: continue - if pattern is None or pattern in res.group(2): - result.append(Identity(*res.groups())) + identifier, developer_name, team = res.groups() + if pattern is None or pattern in '%s (%s)' % (developer_name, team): + result.append(Identity(identifier, developer_name, team)) return result diff --git a/build/config/ios/generate_umbrella_header.py b/build/config/ios/generate_umbrella_header.py index 8547e18aa794..943c49c4df27 100644 --- a/build/config/ios/generate_umbrella_header.py +++ b/build/config/ios/generate_umbrella_header.py @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/ios/hardlink.py b/build/config/ios/hardlink.py index 38f60d401383..7f1be597808e 100644 --- a/build/config/ios/hardlink.py +++ b/build/config/ios/hardlink.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/ios/ios_sdk.gni b/build/config/ios/ios_sdk.gni index fbff8b433acf..14174696849c 100644 --- a/build/config/ios/ios_sdk.gni +++ b/build/config/ios/ios_sdk.gni @@ -1,11 +1,11 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/ios/config.gni") import("//build/config/ios/ios_sdk_overrides.gni") import("//build/toolchain/goma.gni") -import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") import("//build/toolchain/toolchain.gni") import("//build_overrides/build.gni") @@ -46,42 +46,16 @@ declare_args() { # Prefix for CFBundleIdentifier property of iOS bundles (correspond to the # "Organization Identifier" in Xcode). Code signing will fail if no mobile # provisioning for the selected code signing identify support that prefix. 
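# For example, a developer build signing for device might set, in args.gn
# (a sketch; the prefix value below is hypothetical and must match the
# installed mobile provisioning profiles):
#
#   target_os = "ios"
#   target_environment = "device"
#   ios_app_bundle_id_prefix = "org.example"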
- ios_app_bundle_id_prefix = "org.chromium" - - # If non-empty, this list must contain valid cpu architecture, and the final - # build will be a multi-architecture build (aka fat build) supporting the - # main $target_cpu architecture and all of $additional_target_cpus. - # - # For example to build an application that will run on both arm64 and armv7 - # devices, you would use the following in args.gn file when running "gn args": - # - # target_os = "ios" - # target_cpu = "arm64" - # additional_target_cpus = [ "arm" ] - # - # You can also pass the value via "--args" parameter for "gn gen" command by - # using the syntax --args='additional_target_cpus=["arm"] target_cpu="arm64"'. - additional_target_cpus = [] -} + ios_app_bundle_id_prefix = "org.chromium.ost" -declare_args() { - # This variable is set by the toolchain. It is set to true if the toolchain - # is a secondary toolchain as part of a "fat" build. - is_fat_secondary_toolchain = false + # Paths to the mobileprovision files for the chosen code signing + # identity description and app bundle id prefix. + ios_mobileprovision_files = [] - # This variable is set by the toolchain. It is the name of the primary - # toolchain for the fat build (could be current_toolchain). - primary_fat_toolchain_name = "" + # Set to true if all test apps should use the same bundle id. + ios_use_shared_bundle_id_for_test_apps = true } -# Official builds may not use goma. -assert(!(use_goma && is_chrome_branded && is_official_build && - target_cpu == "arm64"), - "goma use is forbidden for official iOS builds.") - -assert(custom_toolchain == "" || additional_target_cpus == [], - "cannot define both custom_toolchain and additional_target_cpus") - # If codesigning is enabled, use must configure either a codesigning identity # or a filter to automatically select the codesigning identity. if (target_environment == "device" && ios_enable_code_signing) { @@ -94,25 +68,6 @@ if (target_environment == "device" && ios_enable_code_signing) { "pattern to match the identity to use).") } -# Initialize additional_toolchains from additional_target_cpus. Assert here -# that the list does not contains $target_cpu nor duplicates as this would -# cause weird errors during the build. -additional_toolchains = [] -if (additional_target_cpus != []) { - foreach(_additional_target_cpu, additional_target_cpus) { - assert(_additional_target_cpu != target_cpu, - "target_cpu must not be listed in additional_target_cpus") - - _toolchain = "//build/toolchain/ios:ios_clang_${_additional_target_cpu}_fat" - foreach(_additional_toolchain, additional_toolchains) { - assert(_toolchain != _additional_toolchain, - "additional_target_cpus must not contains duplicate values") - } - - additional_toolchains += [ _toolchain ] - } -} - if (ios_sdk_path == "") { # Compute default target. 
if (target_environment == "simulator") { @@ -139,10 +94,12 @@ if (ios_sdk_path == "") { ios_sdk_developer_dir, ] } - if (use_system_xcode && use_goma) { + if (use_system_xcode && (use_goma || use_remoteexec)) { ios_sdk_info_args += [ "--create_symlink_at", "sdk/xcode_links", + "--root_build_dir", + root_build_dir, ] } script_name = "//build/config/apple/sdk_info.py" @@ -183,3 +140,8 @@ if (target_environment == "device" && ios_enable_code_signing) { "trim string") } } + +if (ios_use_shared_bundle_id_for_test_apps) { + shared_bundle_id_for_test_apps = + "$ios_app_bundle_id_prefix.chrome.unittests.dev" +} diff --git a/build/config/ios/ios_sdk_overrides.gni b/build/config/ios/ios_sdk_overrides.gni index bd990bcce147..a2373c6c9d23 100644 --- a/build/config/ios/ios_sdk_overrides.gni +++ b/build/config/ios/ios_sdk_overrides.gni @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,11 +7,11 @@ declare_args() { # Version of iOS that we're targeting. - ios_deployment_target = "12.2" + ios_deployment_target = "15.0" } # Always assert that ios_deployment_target is used on non-iOS platforms to # prevent unused args warnings. if (!is_ios) { - assert(ios_deployment_target == "12.2" || true) + assert(ios_deployment_target == "15.0" || true) } diff --git a/build/config/ios/ios_test_runner_wrapper.gni b/build/config/ios/ios_test_runner_wrapper.gni index 115db7ffdb33..378323c4f5c8 100644 --- a/build/config/ios/ios_test_runner_wrapper.gni +++ b/build/config/ios/ios_test_runner_wrapper.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -93,6 +93,13 @@ template("ios_test_runner_wrapper") { "${shards}", ] + if (xcode_version_int >= 1400) { + executable_args += [ + "--readline-timeout", + "600", + ] + } + data_deps = [ "//testing:test_scripts_shared" ] if (defined(invoker.data_deps)) { data_deps += invoker.data_deps @@ -124,19 +131,21 @@ template("ios_test_runner_wrapper") { _wrapper_output_name = wrapper_output_name } - # Test targets may attempt to generate multiple wrappers for a suite with - # multiple different toolchains when running with additional_target_cpus. - # Generate the wrapper script into root_out_dir rather than root_build_dir - # to ensure those wrappers are distinct. - wrapper_script = "${root_out_dir}/bin/${_wrapper_output_name}" + wrapper_script = "${root_build_dir}/bin/${_wrapper_output_name}" data = [] if (defined(invoker.data)) { data += invoker.data } data += [ - "//.vpython", "//ios/build/bots/scripts/", + "//ios/build/bots/scripts/plugin", + + # gRPC interface for iOS test plugin + "//ios/testing/plugin", + + # Variations test utilities used by variations_runner script. + "//testing/scripts/variations_seed_access_helper.py", "//testing/test_env.py", ] } diff --git a/build/config/ios/ios_test_runner_xcuitest.gni b/build/config/ios/ios_test_runner_xcuitest.gni new file mode 100644 index 000000000000..6aeb08b1fb34 --- /dev/null +++ b/build/config/ios/ios_test_runner_xcuitest.gni @@ -0,0 +1,72 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
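# A sketch of how the template defined in this new file might be invoked
# (the target and host application names below are hypothetical):
#
#   ios_test_runner_xcuitest("foo_xcuitests") {
#     xcode_test_application_name = "foo_host_app"
#     deps = [ ":foo_xcuitest_module_sources" ]
#   }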
+ +import("//build/config/ios/ios_test_runner_wrapper.gni") +import("//build/config/ios/rules.gni") + +# ios_test_runner_xcuitest are just ios_xcuitest_test with an +# ios_test_runner_wrapper. Currently used by Crashpad tests, which do not depend +# on EG2 (and therefore do not use ios_eg2_test) +template("ios_test_runner_xcuitest") { + assert(defined(invoker.xcode_test_application_name), + "xcode_test_application_name must be defined for $target_name") + assert( + defined(invoker.deps), + "deps must be defined for $target_name to include at least one xctest" + + "file.") + + _target_name = target_name + _test_target = "${target_name}_test" + ios_xcuitest_test(_test_target) { + forward_variables_from(invoker, + [ + "xcode_test_application_name", + "xctest_bundle_principal_class", + "bundle_deps", + "deps", + "data_deps", + ]) + + # TODO(crbug.com/1056328) Because we change the target name, the subnodes + # are going to append with the _test in the naming, which won't be backwards + # compatible during migration from iOS recipe to Chromium. + output_name = "${_target_name}" + } + + ios_test_runner_wrapper(target_name) { + forward_variables_from(invoker, + [ + "data", + "data_deps", + "deps", + "executable_args", + "retries", + "shards", + "xcode_test_application_name", + ]) + _root_build_dir = rebase_path("${root_build_dir}", root_build_dir) + + if (!defined(data_deps)) { + data_deps = [] + } + + # Include the top ios_test_runner_xcuitest target, and the host app + data_deps += [ ":${_test_target}" ] + + if (!defined(executable_args)) { + executable_args = [] + } + + # The xcuitest module is bundled as *-Runner.app, while the host app is + # bundled as *.app. + executable_args += [ + "--app", + "@WrappedPath(${_root_build_dir}/${target_name}-Runner.app)", + ] + executable_args += [ + "--host-app", + "@WrappedPath(${_root_build_dir}/${xcode_test_application_name}.app)", + ] + } +} diff --git a/build/config/ios/resources/XCTRunnerAddition+Info.plist b/build/config/ios/resources/XCTRunnerAddition+Info.plist index cf9463f6942a..ed26f55d1636 100644 --- a/build/config/ios/resources/XCTRunnerAddition+Info.plist +++ b/build/config/ios/resources/XCTRunnerAddition+Info.plist @@ -3,7 +3,7 @@ CFBundleIdentifier - com.apple.test.${EXECUTABLE_NAME} + ${BUNDLE_IDENTIFIER} CFBundleName ${PRODUCT_NAME} CFBundleExecutable diff --git a/build/config/ios/rules.gni b/build/config/ios/rules.gni index a57254874fa0..c6d40925c4c9 100644 --- a/build/config/ios/rules.gni +++ b/build/config/ios/rules.gni @@ -1,23 +1,16 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/apple/apple_info_plist.gni") import("//build/config/apple/symbols.gni") +import("//build/config/compiler/compiler.gni") import("//build/config/ios/ios_sdk.gni") import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") import("//build/toolchain/toolchain.gni") import("//build_overrides/build.gni") -declare_args() { - # Set to true if an Xcode project is generated for this build. Set this to - # false if you do not plan to run `gn gen --ide=xcode` in this directory. - # This will speed up the generation at the cost of generating an invalid - # Xcode project if `gn gen --ide=xcode` is used. Defaults to true (favor - # correctness over speed). 
- ios_set_attributes_for_xcode_project_generation = true -} - # Constants corresponding to the bundle type identifiers use application, # application extension, XCTest and XCUITest targets respectively. _ios_xcode_app_bundle_id = "com.apple.product-type.application" @@ -25,110 +18,6 @@ _ios_xcode_appex_bundle_id = "com.apple.product-type.app-extension" _ios_xcode_xctest_bundle_id = "com.apple.product-type.bundle.unit-test" _ios_xcode_xcuitest_bundle_id = "com.apple.product-type.bundle.ui-testing" -# Invokes lipo on multiple arch-specific binaries to create a fat binary. -# -# Arguments -# -# arch_binary_target -# name of the target generating the arch-specific binaries, they must -# be named $target_out_dir/$toolchain_cpu/$arch_binary_output. -# -# arch_binary_output -# (optional, defaults to the name of $arch_binary_target) base name of -# the arch-specific binary generated by arch_binary_target. -# -# output_name -# (optional, defaults to $target_name) base name of the target output, -# the full path will be $target_out_dir/$output_name. -# -# configs -# (optional) a list of configurations, this is used to check whether -# the binary should be stripped, when "enable_stripping" is true. -# -template("lipo_binary") { - assert(defined(invoker.arch_binary_target), - "arch_binary_target must be defined for $target_name") - - _target_name = target_name - _output_name = target_name - if (defined(invoker.output_name)) { - _output_name = invoker.output_name - } - - _all_target_cpu = [ current_cpu ] + additional_target_cpus - _all_toolchains = [ current_toolchain ] + additional_toolchains - - _arch_binary_target = invoker.arch_binary_target - _arch_binary_output = get_label_info(_arch_binary_target, "name") - if (defined(invoker.arch_binary_output)) { - _arch_binary_output = invoker.arch_binary_output - } - - action(_target_name) { - forward_variables_from(invoker, - "*", - [ - "arch_binary_output", - "arch_binary_target", - "configs", - "output_name", - ]) - - script = "//build/toolchain/apple/linker_driver.py" - - # http://crbug.com/762840. Fix for bots running out of memory. - pool = "//build/toolchain:link_pool($default_toolchain)" - - outputs = [ "$target_out_dir/$_output_name" ] - - deps = [] - _index = 0 - inputs = [] - foreach(_cpu, _all_target_cpu) { - _toolchain = _all_toolchains[_index] - _index = _index + 1 - - inputs += - [ get_label_info("$_arch_binary_target($_toolchain)", - "target_out_dir") + "/$_cpu/$_arch_binary_output" ] - - deps += [ "$_arch_binary_target($_toolchain)" ] - } - - args = [ - "xcrun", - "lipo", - "-create", - "-output", - rebase_path("$target_out_dir/$_output_name", root_build_dir), - ] + rebase_path(inputs, root_build_dir) - - if (enable_dsyms) { - _dsyms_output_dir = "$root_out_dir/$_output_name.dSYM" - outputs += [ - "$_dsyms_output_dir/", - "$_dsyms_output_dir/Contents/Info.plist", - "$_dsyms_output_dir/Contents/Resources/DWARF/$_output_name", - ] - args += [ "-Wcrl,dsym," + rebase_path("$root_out_dir/.", root_build_dir) ] - if (!use_xcode_clang) { - args += [ "-Wcrl,dsymutilpath," + - rebase_path("//tools/clang/dsymutil/bin/dsymutil", - root_build_dir) ] - } - } - - if (enable_stripping) { - args += [ "-Wcrl,strip,-x,-S" ] - if (save_unstripped_output) { - outputs += [ "$root_out_dir/$_output_name.unstripped" ] - args += [ "-Wcrl,unstripped," + - rebase_path("$root_out_dir/.", root_build_dir) ] - } - } - } -} - # Wrapper around create_bundle taking care of code signature settings. 
# # Arguments @@ -300,14 +189,6 @@ template("create_signed_bundle") { _enable_code_signing = invoker.enable_code_signing } - if (!ios_set_attributes_for_xcode_project_generation) { - not_needed(invoker, - [ - "xcode_product_bundle_id", - "xcode_extra_attributes", - ]) - } - create_bundle(_target_name) { forward_variables_from(invoker, [ @@ -344,35 +225,25 @@ template("create_signed_bundle") { public_deps = [] } - if (ios_set_attributes_for_xcode_project_generation) { - _xcode_product_bundle_id = "" - if (defined(invoker.xcode_product_bundle_id)) { - _xcode_product_bundle_id = invoker.xcode_product_bundle_id - } - - if (_xcode_product_bundle_id != "") { - _ios_provisioning_profile_info = - exec_script("//build/config/ios/codesign.py", - [ - "find-provisioning-profile", - "-b=" + _xcode_product_bundle_id, - ], - "json") - } + _bundle_identifier = "" + if (defined(invoker.xcode_product_bundle_id)) { + _bundle_identifier = invoker.xcode_product_bundle_id + assert(_bundle_identifier == string_replace(_bundle_identifier, "_", "-"), + "$target_name: bundle_identifier does not respect rfc1034: " + + _bundle_identifier) + } - xcode_extra_attributes = { - IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target - if (_xcode_product_bundle_id != "") { - CODE_SIGN_IDENTITY = "iPhone Developer" - DEVELOPMENT_TEAM = _ios_provisioning_profile_info.team_identifier - PRODUCT_BUNDLE_IDENTIFIER = _xcode_product_bundle_id - PROVISIONING_PROFILE_SPECIFIER = _ios_provisioning_profile_info.name - } + xcode_extra_attributes = { + IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target + PRODUCT_BUNDLE_IDENTIFIER = _bundle_identifier + CODE_SIGNING_REQUIRED = "NO" + CODE_SIGNING_ALLOWED = "NO" + CODE_SIGN_IDENTITY = "" + DONT_GENERATE_INFOPLIST_FILE = "YES" - # If invoker has defined extra attributes, they override the defaults. - if (defined(invoker.xcode_extra_attributes)) { - forward_variables_from(invoker.xcode_extra_attributes, "*") - } + # If invoker has defined extra attributes, they override the defaults. + if (defined(invoker.xcode_extra_attributes)) { + forward_variables_from(invoker.xcode_extra_attributes, "*") } } @@ -439,6 +310,11 @@ template("create_signed_bundle") { "-i=" + ios_code_signing_identity, "-b=" + rebase_path(_bundle_binary_path, root_build_dir), ] + foreach(mobileprovision, ios_mobileprovision_files) { + code_signing_args += + [ "-m=" + rebase_path(mobileprovision, root_build_dir) ] + } + code_signing_sources += ios_mobileprovision_files if (_enable_entitlements) { code_signing_args += [ "-e=" + rebase_path(_entitlements_path, root_build_dir) ] @@ -456,7 +332,7 @@ template("create_signed_bundle") { # rebase_path here unless using Goma RBE and system Xcode (as in that # case the system framework are found via a symlink in root_build_dir). 
foreach(_framework, invoker.extra_system_frameworks) { - if (use_system_xcode && use_goma) { + if (use_system_xcode && (use_goma || use_remoteexec)) { _framework_path = rebase_path(_framework, root_build_dir) } else { _framework_path = _framework @@ -525,11 +401,7 @@ template("ios_info_plist") { apple_info_plist(target_name) { format = "binary1" - extra_substitutions = [] - if (defined(invoker.extra_substitutions)) { - extra_substitutions = invoker.extra_substitutions - } - extra_substitutions += [ + extra_substitutions = [ "IOS_BUNDLE_ID_PREFIX=$ios_app_bundle_id_prefix", "IOS_PLATFORM_BUILD=$ios_platform_build", "IOS_PLATFORM_NAME=$ios_sdk_name", @@ -542,6 +414,9 @@ template("ios_info_plist") { "XCODE_BUILD=$xcode_build", "XCODE_VERSION=$xcode_version", ] + if (defined(invoker.extra_substitutions)) { + extra_substitutions += invoker.extra_substitutions + } plist_templates = [ "//build/config/ios/BuildInfo.plist", _info_plist, @@ -612,10 +487,11 @@ template("ios_info_plist") { # variant with the same binary but the correct bundle_deps, the bundle # at $target_out_dir/$output_name will be a copy of the first variant. # -# xcode_product_bundle_id: -# (optional) string, the bundle ID that will be added in the XCode -# attributes to enable some features when debugging (e.g. MetricKit). -# defaults to "$ios_app_bundle_id_prefix.$output_name". +# bundle_identifier: +# (optional) string, value of CFBundleIdentifier in the application +# Info.plist, defaults to "$ios_app_bundle_id_prefix.$output_name" +# if omitted. Will be used to set BUNDLE_IDENTIFIER when generating +# the application Info.plist # # For more information, see "gn help executable". template("ios_app_bundle") { @@ -625,32 +501,21 @@ template("ios_app_bundle") { _output_name = invoker.output_name } - _primary_toolchain = current_toolchain - if (is_fat_secondary_toolchain) { - _primary_toolchain = primary_fat_toolchain_name - } - assert( !defined(invoker.bundle_extension), "bundle_extension must not be set for ios_app_bundle template for $target_name") - _xcode_product_bundle_id = "$ios_app_bundle_id_prefix.$_output_name" - if (defined(invoker.xcode_product_bundle_id)) { - _xcode_product_bundle_id = invoker.xcode_product_bundle_id - _xcode_product_bundle_id = - "$ios_app_bundle_id_prefix.$_xcode_product_bundle_id" - } else if (defined(invoker.bundle_id)) { - _xcode_product_bundle_id = invoker.bundle_id + if (defined(invoker.bundle_identifier)) { + _bundle_identifier = invoker.bundle_identifier + assert(_bundle_identifier == string_replace(_bundle_identifier, "_", "-"), + "$target_name: bundle_identifier does not respect rfc1034: " + + _bundle_identifier) + } else { + # Bundle identifier should respect rfc1034, so replace "_" with "-". + _bundle_identifier = + "$ios_app_bundle_id_prefix." + string_replace(_output_name, "_", "-") } - # Bundle ID should respect rfc1034 and replace _ with -. 
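# For instance (a sketch; the target and identifier below are hypothetical),
# an application can override the default identifier with:
#
#   ios_app_bundle("foo_app") {
#     output_name = "FooApp"
#     bundle_identifier = "$ios_app_bundle_id_prefix.foo-app"
#     info_plist = "Info.plist"
#     deps = [ ":foo_lib" ]
#   }
#
# Underscores must be avoided in the identifier (use "-" instead), as
# enforced by the rfc1034 assert above.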
- _xcode_product_bundle_id = - string_replace("$_xcode_product_bundle_id", "_", "-") - - _arch_executable_source = _target_name + "_arch_executable_sources" - _arch_executable_target = _target_name + "_arch_executable" - _lipo_executable_target = _target_name + "_executable" - if (defined(invoker.variants) && invoker.variants != []) { _variants = [] @@ -687,35 +552,11 @@ template("ios_app_bundle") { _default_variant = _variants[0] - source_set(_arch_executable_source) { - forward_variables_from(invoker, - "*", - [ - "bundle_deps", - "bundle_deps_filter", - "bundle_extension", - "enable_code_signing", - "entitlements_path", - "entitlements_target", - "extra_substitutions", - "extra_system_frameworks", - "info_plist", - "info_plist_target", - "output_name", - "product_type", - "visibility", - "xcode_extra_attributes", - ]) - - visibility = [ ":$_arch_executable_target" ] - } - - if (!is_fat_secondary_toolchain || target_environment == "simulator") { - _generate_entitlements_target = _target_name + "_gen_entitlements" - _generate_entitlements_output = - get_label_info(":$_generate_entitlements_target($_primary_toolchain)", - "target_out_dir") + "/$_output_name.xcent" - } + _executable_target = _target_name + "_executable" + _generate_entitlements_target = _target_name + "_gen_entitlements" + _generate_entitlements_output = + get_label_info(":$_generate_entitlements_target", "target_out_dir") + + "/$_output_name.xcent" _product_type = _ios_xcode_app_bundle_id if (defined(invoker.product_type)) { @@ -732,7 +573,7 @@ template("ios_app_bundle") { _is_app_bundle = _product_type == _ios_xcode_app_bundle_id - executable(_arch_executable_target) { + executable(_executable_target) { forward_variables_from(invoker, "*", [ @@ -748,20 +589,14 @@ template("ios_app_bundle") { "info_plist_target", "output_name", "product_type", - "sources", "visibility", "xcode_extra_attributes", ]) - visibility = [ ":$_lipo_executable_target($_primary_toolchain)" ] - if (is_fat_secondary_toolchain) { - visibility += [ ":$_target_name" ] - } - - if (!defined(deps)) { - deps = [] + visibility = [] + foreach(_variant, _variants) { + visibility += [ ":${_variant.target_name}" ] } - deps += [ ":$_arch_executable_source" ] if (!defined(frameworks)) { frameworks = [] @@ -769,7 +604,10 @@ template("ios_app_bundle") { frameworks += [ "UIKit.framework" ] if (target_environment == "simulator") { - deps += [ ":$_generate_entitlements_target($_primary_toolchain)" ] + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_generate_entitlements_target" ] if (!defined(inputs)) { inputs = [] @@ -785,197 +623,171 @@ template("ios_app_bundle") { output_name = _output_name output_prefix_override = true - output_dir = "$target_out_dir/$current_cpu" + output_dir = target_out_dir } - if (is_fat_secondary_toolchain) { - # For fat builds, only the default toolchain will generate an application - # bundle. For the other toolchains, the template is only used for building - # the arch-specific binary, thus the default target is just a group(). 
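# The BUNDLE_IDENTIFIER substitution set up below is what plist templates
# reference for CFBundleIdentifier; for example, the
# XCTRunnerAddition+Info.plist hunk earlier in this patch swaps the
# hard-coded com.apple.test.${EXECUTABLE_NAME} value for
# ${BUNDLE_IDENTIFIER}.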
+ _generate_info_plist = target_name + "_generate_info_plist" + ios_info_plist(_generate_info_plist) { + forward_variables_from(invoker, + [ + "info_plist", + "info_plist_target", + ]) - group(_target_name) { - forward_variables_from(invoker, - [ - "visibility", - "testonly", - ]) - public_deps = [ ":$_arch_executable_target" ] + executable_name = _output_name + + extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ] + if (defined(invoker.extra_substitutions)) { + extra_substitutions += invoker.extra_substitutions + } + } + + if (!defined(invoker.entitlements_target)) { + _entitlements_path = "//build/config/ios/entitlements.plist" + if (defined(invoker.entitlements_path)) { + _entitlements_path = invoker.entitlements_path } } else { - lipo_binary(_lipo_executable_target) { - forward_variables_from(invoker, - [ - "configs", - "testonly", - ]) + assert(!defined(invoker.entitlements_path), + "Cannot define both entitlements_path and entitlements_target" + + "for $_target_name") - visibility = [] - foreach(_variant, _variants) { - visibility += [ ":${_variant.target_name}" ] - } + _entitlements_target_outputs = + get_target_outputs(invoker.entitlements_target) + _entitlements_path = _entitlements_target_outputs[0] + } - output_name = _output_name - arch_binary_target = ":$_arch_executable_target" - arch_binary_output = _output_name + action(_generate_entitlements_target) { + _gen_info_plist_outputs = get_target_outputs(":$_generate_info_plist") + _info_plist_path = _gen_info_plist_outputs[0] + + script = "//build/config/ios/codesign.py" + deps = [ ":$_generate_info_plist" ] + if (defined(invoker.entitlements_target)) { + deps += [ invoker.entitlements_target ] } + sources = [ + _entitlements_path, + _info_plist_path, + ] + sources += ios_mobileprovision_files - _generate_info_plist = target_name + "_generate_info_plist" - ios_info_plist(_generate_info_plist) { - forward_variables_from(invoker, - [ - "extra_substitutions", - "info_plist", - "info_plist_target", - ]) + outputs = [ _generate_entitlements_output ] - executable_name = _output_name + args = [ + "generate-entitlements", + "-e=" + rebase_path(_entitlements_path, root_build_dir), + "-p=" + rebase_path(_info_plist_path, root_build_dir), + ] + foreach(mobileprovision, ios_mobileprovision_files) { + args += [ "-m=" + rebase_path(mobileprovision, root_build_dir) ] } + args += rebase_path(outputs, root_build_dir) + } - if (!is_fat_secondary_toolchain) { - if (!defined(invoker.entitlements_target)) { - _entitlements_path = "//build/config/ios/entitlements.plist" - if (defined(invoker.entitlements_path)) { - _entitlements_path = invoker.entitlements_path - } - } else { - assert(!defined(invoker.entitlements_path), - "Cannot define both entitlements_path and entitlements_target" + - "for $_target_name") + # Only write PkgInfo for real application, not application extension. + if (_is_app_bundle) { + _create_pkg_info = target_name + "_pkg_info" + action(_create_pkg_info) { + forward_variables_from(invoker, [ "testonly" ]) + script = "//build/apple/write_pkg_info.py" + inputs = [ "//build/apple/plist_util.py" ] + sources = get_target_outputs(":$_generate_info_plist") + outputs = [ + # Cannot name the output PkgInfo as the name will not be unique if + # multiple ios_app_bundle are defined in the same BUILD.gn file. The + # file is renamed in the bundle_data outputs to the correct name. 
+ "$target_gen_dir/$target_name", + ] + args = [ "--plist" ] + rebase_path(sources, root_build_dir) + + [ "--output" ] + rebase_path(outputs, root_build_dir) + deps = [ ":$_generate_info_plist" ] + } - _entitlements_target_outputs = - get_target_outputs(invoker.entitlements_target) - _entitlements_path = _entitlements_target_outputs[0] - } + _bundle_data_pkg_info = target_name + "_bundle_data_pkg_info" + bundle_data(_bundle_data_pkg_info) { + forward_variables_from(invoker, [ "testonly" ]) + sources = get_target_outputs(":$_create_pkg_info") + outputs = [ "{{bundle_resources_dir}}/PkgInfo" ] + public_deps = [ ":$_create_pkg_info" ] + } + } - action(_generate_entitlements_target) { - _gen_info_plist_outputs = get_target_outputs(":$_generate_info_plist") - _info_plist_path = _gen_info_plist_outputs[0] + foreach(_variant, _variants) { + create_signed_bundle(_variant.target_name) { + forward_variables_from(invoker, + [ + "bundle_deps", + "bundle_deps_filter", + "data_deps", + "deps", + "enable_code_signing", + "entitlements_path", + "entitlements_target", + "extra_system_frameworks", + "public_configs", + "public_deps", + "testonly", + "visibility", + "xcode_extra_attributes", + ]) - script = "//build/config/ios/codesign.py" - deps = [ ":$_generate_info_plist" ] - if (defined(invoker.entitlements_target)) { - deps += [ invoker.entitlements_target ] - } - sources = [ - _entitlements_path, - _info_plist_path, - ] - outputs = [ _generate_entitlements_output ] + output_name = _output_name + bundle_gen_dir = _variant.bundle_gen_dir + bundle_binary_target = ":$_executable_target" + bundle_binary_output = _output_name + bundle_extension = _bundle_extension + product_type = _product_type + xcode_product_bundle_id = _bundle_identifier - args = [ - "generate-entitlements", - "-e=" + rebase_path(_entitlements_path, root_build_dir), - "-p=" + rebase_path(_info_plist_path, root_build_dir), - ] + rebase_path(outputs, root_build_dir) - } - } + _generate_info_plist_outputs = + get_target_outputs(":$_generate_info_plist") + primary_info_plist = _generate_info_plist_outputs[0] + partial_info_plist = + "$target_gen_dir/${_variant.target_name}_partial_info.plist" - # Only write PkgInfo for real application, not application extension. - if (_is_app_bundle) { - _create_pkg_info = target_name + "_pkg_info" - action(_create_pkg_info) { - forward_variables_from(invoker, [ "testonly" ]) - script = "//build/apple/write_pkg_info.py" - inputs = [ "//build/apple/plist_util.py" ] - sources = get_target_outputs(":$_generate_info_plist") - outputs = [ - # Cannot name the output PkgInfo as the name will not be unique if - # multiple ios_app_bundle are defined in the same BUILD.gn file. The - # file is renamed in the bundle_data outputs to the correct name. 
- "$target_gen_dir/$target_name", - ] - args = [ "--plist" ] + rebase_path(sources, root_build_dir) + - [ "--output" ] + rebase_path(outputs, root_build_dir) - deps = [ ":$_generate_info_plist" ] + if (!defined(deps)) { + deps = [] } + deps += [ ":$_generate_info_plist" ] - _bundle_data_pkg_info = target_name + "_bundle_data_pkg_info" - bundle_data(_bundle_data_pkg_info) { - forward_variables_from(invoker, [ "testonly" ]) - sources = get_target_outputs(":$_create_pkg_info") - outputs = [ "{{bundle_resources_dir}}/PkgInfo" ] - public_deps = [ ":$_create_pkg_info" ] + if (!defined(bundle_deps)) { + bundle_deps = [] } - } - - foreach(_variant, _variants) { - create_signed_bundle(_variant.target_name) { - forward_variables_from(invoker, - [ - "bundle_deps", - "bundle_deps_filter", - "data_deps", - "deps", - "enable_code_signing", - "entitlements_path", - "entitlements_target", - "extra_system_frameworks", - "public_configs", - "public_deps", - "testonly", - "visibility", - "xcode_extra_attributes", - ]) - - output_name = _output_name - bundle_gen_dir = _variant.bundle_gen_dir - bundle_binary_target = ":$_lipo_executable_target" - bundle_binary_output = _output_name - bundle_extension = _bundle_extension - product_type = _product_type - xcode_product_bundle_id = _xcode_product_bundle_id - - _generate_info_plist_outputs = - get_target_outputs(":$_generate_info_plist") - primary_info_plist = _generate_info_plist_outputs[0] - partial_info_plist = - "$target_gen_dir/${_variant.target_name}_partial_info.plist" - - if (!defined(deps)) { - deps = [] - } - deps += [ ":$_generate_info_plist" ] + if (_is_app_bundle) { + bundle_deps += [ ":$_bundle_data_pkg_info" ] + } + bundle_deps += _variant.bundle_deps - if (!defined(bundle_deps)) { - bundle_deps = [] - } - if (_is_app_bundle) { - bundle_deps += [ ":$_bundle_data_pkg_info" ] + if (target_environment == "simulator") { + if (!defined(data_deps)) { + data_deps = [] } - bundle_deps += _variant.bundle_deps - - if (target_environment == "simulator") { - if (!defined(data_deps)) { - data_deps = [] - } + if (build_with_chromium) { data_deps += [ "//testing/iossim" ] } } } + } - if (_default_variant.name != "") { - _bundle_short_name = "$_output_name$_bundle_extension" - action(_target_name) { - forward_variables_from(invoker, [ "testonly" ]) + if (_default_variant.name != "") { + _bundle_short_name = "$_output_name$_bundle_extension" + action(_target_name) { + forward_variables_from(invoker, [ "testonly" ]) - script = "//build/config/ios/hardlink.py" - public_deps = [] - foreach(_variant, _variants) { - public_deps += [ ":${_variant.target_name}" ] - } + script = "//build/config/ios/hardlink.py" + public_deps = [] + foreach(_variant, _variants) { + public_deps += [ ":${_variant.target_name}" ] + } - sources = [ "${_default_variant.bundle_gen_dir}/$_bundle_short_name" ] - outputs = [ "$root_out_dir/$_bundle_short_name" ] + sources = [ "${_default_variant.bundle_gen_dir}/$_bundle_short_name" ] + outputs = [ "$root_out_dir/$_bundle_short_name" ] - args = rebase_path(sources, root_build_dir) + - rebase_path(outputs, root_build_dir) - } + args = rebase_path(sources, root_build_dir) + + rebase_path(outputs, root_build_dir) } } - - if (is_fat_secondary_toolchain) { - not_needed("*") - } } set_defaults("ios_app_bundle") { @@ -1278,27 +1090,12 @@ template("ios_framework_bundle") { _has_public_headers = defined(invoker.public_headers) && invoker.public_headers != [] - _primary_toolchain = current_toolchain - if (is_fat_secondary_toolchain) { - _primary_toolchain = 
primary_fat_toolchain_name - } - - # Public configs are not propagated across toolchain (see crbug.com/675224) - # so some configs have to be defined for both default_toolchain and all others - # toolchains when performing a fat build. Use "get_label_info" to construct - # the path since they need to be relative to the default_toolchain. - - _default_toolchain_root_out_dir = - get_label_info("$_target_name($_primary_toolchain)", "root_out_dir") - - _arch_shared_library_source = _target_name + "_arch_shared_library_sources" - _arch_shared_library_target = _target_name + "_arch_shared_library" - _lipo_shared_library_target = _target_name + "_shared_library" + _shared_library_target = _target_name + "_shared_library" _link_target_name = _target_name + "+link" if (_has_public_headers) { _default_toolchain_target_gen_dir = - get_label_info("$_target_name($_primary_toolchain)", "target_gen_dir") + get_label_info("$_target_name", "target_gen_dir") _framework_headers_target = _target_name + "_framework_headers" @@ -1307,7 +1104,7 @@ template("ios_framework_bundle") { "$_default_toolchain_target_gen_dir/$_output_name.headers.hmap" config(_headers_map_config) { visibility = [ - ":${_arch_shared_library_source}", + ":${_shared_library_target}", ":${_target_name}_signed_bundle", ] include_dirs = [ _header_map_filename ] @@ -1316,7 +1113,7 @@ template("ios_framework_bundle") { _framework_headers_config = _target_name + "_framework_headers_config" config(_framework_headers_config) { - framework_dirs = [ _default_toolchain_root_out_dir ] + framework_dirs = [ root_out_dir ] } _framework_public_config = _target_name + "_public_config" @@ -1325,7 +1122,7 @@ template("ios_framework_bundle") { frameworks = [ "$_output_name.framework" ] } - source_set(_arch_shared_library_source) { + shared_library(_shared_library_target) { forward_variables_from(invoker, "*", [ @@ -1341,7 +1138,13 @@ template("ios_framework_bundle") { "visibility", ]) - visibility = [ ":$_arch_shared_library_target" ] + visibility = [ ":${_target_name}_signed_bundle" ] + + if (!defined(ldflags)) { + ldflags = [] + } + ldflags += + [ "-Wl,-install_name,@rpath/$_output_name.framework/$_output_name" ] if (_has_public_headers) { configs += [ ":$_headers_map_config" ] @@ -1349,301 +1152,214 @@ template("ios_framework_bundle") { if (!defined(deps)) { deps = [] } - deps += [ ":$_framework_headers_target($_primary_toolchain)" ] + deps += [ ":$_framework_headers_target" ] } - } - - shared_library(_arch_shared_library_target) { - forward_variables_from(invoker, - "*", - [ - "bundle_deps", - "bundle_deps_filter", - "data_deps", - "enable_code_signing", - "extra_substitutions", - "info_plist", - "info_plist_target", - "output_name", - "sources", - "public_configs", - "visibility", - ]) - - visibility = [ ":$_lipo_shared_library_target($_primary_toolchain)" ] - if (is_fat_secondary_toolchain) { - visibility += [ - ":${_target_name}", - ":${_target_name}_signed_bundle", - ] - } - - if (!defined(deps)) { - deps = [] - } - deps += [ ":$_arch_shared_library_source" ] - if (_has_public_headers) { - deps += [ ":$_framework_headers_target($_primary_toolchain)" ] - } - if (!defined(ldflags)) { - ldflags = [] - } - ldflags += - [ "-Wl,-install_name,@rpath/$_output_name.framework/$_output_name" ] output_extension = "" output_name = _output_name output_prefix_override = true - output_dir = "$target_out_dir/$current_cpu" + output_dir = target_out_dir } - if (is_fat_secondary_toolchain) { - # For fat builds, only the default toolchain will generate a framework - # 
bundle. For the other toolchains, the template is only used for building - # the arch-specific binary, thus the default target is just a group(). + if (_has_public_headers) { + _public_headers = invoker.public_headers - group(_target_name) { - forward_variables_from(invoker, - [ - "visibility", - "testonly", - ]) - public_deps = [ ":$_arch_shared_library_target" ] + _framework_root_dir = "$root_out_dir/$_output_name.framework" + if (target_environment == "simulator" || target_environment == "device") { + _framework_contents_dir = _framework_root_dir + } else if (target_environment == "catalyst") { + _framework_contents_dir = "$_framework_root_dir/Versions/A" } - group(_link_target_name) { + _compile_headers_map_target = _target_name + "_compile_headers_map" + action(_compile_headers_map_target) { + visibility = [ ":$_framework_headers_target" ] forward_variables_from(invoker, [ - "public_configs", - "visibility", + "deps", + "public_deps", "testonly", ]) - public_deps = [ ":$_link_target_name($_primary_toolchain)" ] - - if (_has_public_headers) { - if (!defined(public_configs)) { - public_configs = [] + script = "//build/config/ios/write_framework_hmap.py" + outputs = [ _header_map_filename ] + + # The header map generation only wants the list of headers, not all of + # sources, so filter any non-header source files from "sources". It is + # less error prone that having the developer duplicate the list of all + # headers in addition to "sources". + sources = [] + foreach(_source, invoker.sources) { + if (get_path_info(_source, "extension") == "h") { + sources += [ _source ] } - public_configs += [ ":$_framework_headers_config" ] } - if (!defined(all_dependent_configs)) { - all_dependent_configs = [] - } - all_dependent_configs += [ ":$_framework_public_config" ] - } - group("$_target_name+bundle") { - forward_variables_from(invoker, [ "testonly" ]) - public_deps = [ ":$_target_name+bundle($_primary_toolchain)" ] + args = [ + rebase_path(_header_map_filename, root_build_dir), + rebase_path(_framework_root_dir, root_build_dir), + ] + rebase_path(sources, root_build_dir) } - not_needed(invoker, "*") - } else { - if (_has_public_headers) { - _public_headers = invoker.public_headers - - _framework_root_dir = "$root_out_dir/$_output_name.framework" - if (target_environment == "simulator" || target_environment == "device") { - _framework_contents_dir = _framework_root_dir - } else if (target_environment == "catalyst") { - _framework_contents_dir = "$_framework_root_dir/Versions/A" - } - - _compile_headers_map_target = _target_name + "_compile_headers_map" - action(_compile_headers_map_target) { - visibility = [ ":$_framework_headers_target" ] - forward_variables_from(invoker, - [ - "deps", - "public_deps", - "testonly", - ]) - script = "//build/config/ios/write_framework_hmap.py" - outputs = [ _header_map_filename ] - - # The header map generation only wants the list of headers, not all of - # sources, so filter any non-header source files from "sources". It is - # less error prone that having the developer duplicate the list of all - # headers in addition to "sources". 
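# For example (hypothetical file names), a framework target with
#
#   sources = [ "foo.h", "foo_internal.h", "foo.mm" ]
#
# contributes only the two ".h" files to the header map; "foo.mm" is
# dropped by the extension filter in this action.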
- sources = [] - foreach(_source, invoker.sources) { - if (get_path_info(_source, "extension") == "h") { - sources += [ _source ] - } - } - - args = [ - rebase_path(_header_map_filename), - rebase_path(_framework_root_dir, root_build_dir), - ] + rebase_path(sources, root_build_dir) - } - - _create_module_map_target = _target_name + "_module_map" - action(_create_module_map_target) { - visibility = [ ":$_framework_headers_target" ] - script = "//build/config/ios/write_framework_modulemap.py" - outputs = [ "$_framework_contents_dir/Modules/module.modulemap" ] - args = [ - _output_name, - rebase_path("$_framework_contents_dir/Modules", root_build_dir), - ] - } - - _copy_public_headers_target = _target_name + "_copy_public_headers" - copy(_copy_public_headers_target) { - forward_variables_from(invoker, - [ - "testonly", - "deps", - ]) - visibility = [ ":$_framework_headers_target" ] - sources = _public_headers - outputs = [ "$_framework_contents_dir/Headers/{{source_file_part}}" ] - - # Do not use forward_variables_from for "public_deps" as - # we do not want to forward those dependencies. - if (defined(invoker.public_deps)) { - if (!defined(deps)) { - deps = [] - } - deps += invoker.public_deps - } - } - - group(_framework_headers_target) { - forward_variables_from(invoker, [ "testonly" ]) - deps = [ - ":$_compile_headers_map_target", - ":$_create_module_map_target", - ] - public_deps = [ ":$_copy_public_headers_target" ] - } + _create_module_map_target = _target_name + "_module_map" + action(_create_module_map_target) { + visibility = [ ":$_framework_headers_target" ] + script = "//build/config/ios/write_framework_modulemap.py" + outputs = [ "$_framework_contents_dir/Modules/module.modulemap" ] + args = [ + _output_name, + rebase_path("$_framework_contents_dir/Modules", root_build_dir), + ] } - lipo_binary(_lipo_shared_library_target) { + _copy_public_headers_target = _target_name + "_copy_public_headers" + copy(_copy_public_headers_target) { forward_variables_from(invoker, [ - "configs", "testonly", + "deps", ]) + visibility = [ ":$_framework_headers_target" ] + sources = _public_headers + outputs = [ "$_framework_contents_dir/Headers/{{source_file_part}}" ] - visibility = [ ":${_target_name}_signed_bundle" ] - output_name = _output_name - arch_binary_target = ":$_arch_shared_library_target" - arch_binary_output = _output_name + # Do not use forward_variables_from for "public_deps" as + # we do not want to forward those dependencies. + if (defined(invoker.public_deps)) { + if (!defined(deps)) { + deps = [] + } + deps += invoker.public_deps + } } - _info_plist_target = _target_name + "_info_plist" - _info_plist_bundle = _target_name + "_info_plist_bundle" - ios_info_plist(_info_plist_target) { - visibility = [ ":$_info_plist_bundle" ] - executable_name = _output_name - forward_variables_from(invoker, - [ - "extra_substitutions", - "info_plist", - "info_plist_target", - ]) + group(_framework_headers_target) { + forward_variables_from(invoker, [ "testonly" ]) + deps = [ + ":$_compile_headers_map_target", + ":$_create_module_map_target", + ] + public_deps = [ ":$_copy_public_headers_target" ] } + } - bundle_data(_info_plist_bundle) { - visibility = [ ":${_target_name}_signed_bundle" ] - forward_variables_from(invoker, [ "testonly" ]) - sources = get_target_outputs(":$_info_plist_target") - public_deps = [ ":$_info_plist_target" ] + # Bundle identifier should respect rfc1034, so replace "_" with "-". + _bundle_identifier = + "$ios_app_bundle_id_prefix." 
+ string_replace(_output_name, "_", "-") - if (target_environment != "catalyst") { - outputs = [ "{{bundle_contents_dir}}/Info.plist" ] - } else { - outputs = [ "{{bundle_resources_dir}}/Info.plist" ] - } + _info_plist_target = _target_name + "_info_plist" + _info_plist_bundle = _target_name + "_info_plist_bundle" + ios_info_plist(_info_plist_target) { + visibility = [ ":$_info_plist_bundle" ] + executable_name = _output_name + forward_variables_from(invoker, + [ + "info_plist", + "info_plist_target", + ]) + + extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ] + if (defined(invoker.extra_substitutions)) { + extra_substitutions += invoker.extra_substitutions } + } - create_signed_bundle(_target_name + "_signed_bundle") { - forward_variables_from(invoker, - [ - "bundle_deps", - "bundle_deps_filter", - "data_deps", - "deps", - "enable_code_signing", - "public_configs", - "public_deps", - "testonly", - "visibility", - ]) + bundle_data(_info_plist_bundle) { + visibility = [ ":${_target_name}_signed_bundle" ] + forward_variables_from(invoker, [ "testonly" ]) + sources = get_target_outputs(":$_info_plist_target") + public_deps = [ ":$_info_plist_target" ] - product_type = "com.apple.product-type.framework" - bundle_extension = ".framework" + if (target_environment != "catalyst") { + outputs = [ "{{bundle_contents_dir}}/Info.plist" ] + } else { + outputs = [ "{{bundle_resources_dir}}/Info.plist" ] + } + } - output_name = _output_name - bundle_binary_target = ":$_lipo_shared_library_target" - bundle_binary_output = _output_name + create_signed_bundle(_target_name + "_signed_bundle") { + forward_variables_from(invoker, + [ + "bundle_deps", + "bundle_deps_filter", + "data_deps", + "deps", + "enable_code_signing", + "public_configs", + "public_deps", + "testonly", + "visibility", + ]) - has_public_headers = _has_public_headers + product_type = "com.apple.product-type.framework" + bundle_extension = ".framework" - # Framework do not have entitlements nor mobileprovision because they use - # the one from the bundle using them (.app or .appex) as they are just - # dynamic library with shared code. - disable_entitlements = true - disable_embedded_mobileprovision = true + output_name = _output_name + bundle_binary_target = ":$_shared_library_target" + bundle_binary_output = _output_name - if (!defined(deps)) { - deps = [] - } - deps += [ ":$_info_plist_bundle" ] - } + has_public_headers = _has_public_headers - group(_target_name) { - forward_variables_from(invoker, - [ - "public_configs", - "public_deps", - "testonly", - "visibility", - ]) - if (!defined(public_deps)) { - public_deps = [] - } - public_deps += [ ":${_target_name}_signed_bundle" ] + # Framework do not have entitlements nor mobileprovision because they use + # the one from the bundle using them (.app or .appex) as they are just + # dynamic library with shared code. 
+ disable_entitlements = true + disable_embedded_mobileprovision = true - if (_has_public_headers) { - if (!defined(public_configs)) { - public_configs = [] - } - public_configs += [ ":$_framework_headers_config" ] - } + if (!defined(deps)) { + deps = [] } + deps += [ ":$_info_plist_bundle" ] + } - group(_link_target_name) { - forward_variables_from(invoker, - [ - "public_configs", - "public_deps", - "testonly", - "visibility", - ]) - if (!defined(public_deps)) { - public_deps = [] - } - public_deps += [ ":$_target_name" ] + group(_target_name) { + forward_variables_from(invoker, + [ + "public_configs", + "public_deps", + "testonly", + "visibility", + ]) + if (!defined(public_deps)) { + public_deps = [] + } + public_deps += [ ":${_target_name}_signed_bundle" ] - if (!defined(all_dependent_configs)) { - all_dependent_configs = [] + if (_has_public_headers) { + if (!defined(public_configs)) { + public_configs = [] } - all_dependent_configs += [ ":$_framework_public_config" ] + public_configs += [ ":$_framework_headers_config" ] } + } - bundle_data(_target_name + "+bundle") { - forward_variables_from(invoker, - [ - "testonly", - "visibility", - ]) - public_deps = [ ":$_target_name" ] - sources = [ "$root_out_dir/$_output_name.framework" ] - outputs = [ "{{bundle_contents_dir}}/Frameworks/$_output_name.framework" ] + group(_link_target_name) { + forward_variables_from(invoker, + [ + "public_configs", + "public_deps", + "testonly", + "visibility", + ]) + if (!defined(public_deps)) { + public_deps = [] } + public_deps += [ ":$_target_name" ] + + if (!defined(all_dependent_configs)) { + all_dependent_configs = [] + } + all_dependent_configs += [ ":$_framework_public_config" ] + } + + bundle_data(_target_name + "+bundle") { + forward_variables_from(invoker, + [ + "testonly", + "visibility", + ]) + public_deps = [ ":$_target_name" ] + sources = [ "$root_out_dir/$_output_name.framework" ] + outputs = [ "{{bundle_contents_dir}}/Frameworks/$_output_name.framework" ] } } @@ -1691,13 +1407,6 @@ template("ios_xctest_bundle") { assert(defined(invoker.xcode_test_application_name), "xcode_test_application_name must be defined for $target_name") - # Silence "assignment had no effect" error for non-default toolchains as - # following variables are only used in the expansion of the template for the - # default toolchain. 
- if (is_fat_secondary_toolchain) { - not_needed(invoker, "*") - } - _target_name = target_name _output_name = target_name @@ -1705,173 +1414,134 @@ template("ios_xctest_bundle") { _output_name = invoker.output_name } - _arch_loadable_module_source = _target_name + "_arch_loadable_module_source" - _arch_loadable_module_target = _target_name + "_arch_loadable_module" - _lipo_loadable_module_target = _target_name + "_loadable_module" + _loadable_module_target = _target_name + "_loadable_module" - _primary_toolchain = current_toolchain - if (is_fat_secondary_toolchain) { - _primary_toolchain = primary_fat_toolchain_name - } - - source_set(_arch_loadable_module_source) { - forward_variables_from(invoker, [ "deps" ]) - - testonly = true - visibility = [ ":$_arch_loadable_module_target" ] - } + loadable_module(_loadable_module_target) { + forward_variables_from(invoker, + "*", + [ + "host_target", + "output_dir", + "output_extension", + "output_name", + "output_prefix_override", + "product_type", + "testonly", + "visibility", + "xcode_test_application_name", + "xcode_test_application_output_name", + "xctest_bundle_principal_class", + "bundle_deps_filter", + ]) - loadable_module(_arch_loadable_module_target) { testonly = true - visibility = [ ":$_lipo_loadable_module_target($_primary_toolchain)" ] - if (is_fat_secondary_toolchain) { - visibility += [ ":$_target_name" ] - } + visibility = [ ":$_target_name" ] - deps = [ ":$_arch_loadable_module_source" ] configs += [ "//build/config/ios:xctest_config" ] - output_dir = "$target_out_dir/$current_cpu" + output_dir = target_out_dir output_name = _output_name output_prefix_override = true output_extension = "" } - if (is_fat_secondary_toolchain) { - # For fat builds, only the default toolchain will generate a test bundle. - # For the other toolchains, the template is only used for building the - # arch-specific binary, thus the default target is just a group(). - group(_target_name) { - forward_variables_from(invoker, [ "visibility" ]) - testonly = true - - public_deps = [ ":$_arch_loadable_module_target" ] - } - - not_needed(invoker, "*") - } else { - _info_plist_target = _target_name + "_info_plist" - _info_plist_bundle = _target_name + "_info_plist_bundle" - - ios_info_plist(_info_plist_target) { - testonly = true - visibility = [ ":$_info_plist_bundle" ] - - info_plist = "//build/config/ios/Module-Info.plist" - executable_name = _output_name + _info_plist_target = _target_name + "_info_plist" + _info_plist_bundle = _target_name + "_info_plist_bundle" - if (defined(invoker.xctest_bundle_principal_class)) { - _principal_class = invoker.xctest_bundle_principal_class - } else { - # Fall back to a reasonable default value. - _principal_class = "NSObject" - } - extra_substitutions = [ - "XCTEST_BUNDLE_PRINCIPAL_CLASS=${_principal_class}", - "MODULE_BUNDLE_ID=gtest.$_output_name", - ] - } + # Bundle identifier should respect rfc1034, so replace "_" with "-". + _bundle_identifier = "$ios_app_bundle_id_prefix.chrome." 
+ + string_replace(_output_name, "_", "-") - bundle_data(_info_plist_bundle) { - testonly = true - visibility = [ ":$_target_name" ] + ios_info_plist(_info_plist_target) { + testonly = true + visibility = [ ":$_info_plist_bundle" ] - public_deps = [ ":$_info_plist_target" ] + info_plist = "//build/config/ios/Module-Info.plist" + executable_name = _output_name - sources = get_target_outputs(":$_info_plist_target") - outputs = [ "{{bundle_contents_dir}}/Info.plist" ] + if (defined(invoker.xctest_bundle_principal_class)) { + _principal_class = invoker.xctest_bundle_principal_class + } else { + # Fall back to a reasonable default value. + _principal_class = "NSObject" } + extra_substitutions = [ + "XCTEST_BUNDLE_PRINCIPAL_CLASS=${_principal_class}", + "BUNDLE_IDENTIFIER=$_bundle_identifier", + ] + } - lipo_binary(_lipo_loadable_module_target) { - forward_variables_from(invoker, [ "configs" ]) + bundle_data(_info_plist_bundle) { + testonly = true + visibility = [ ":$_target_name" ] - testonly = true - visibility = [ ":$_target_name" ] + public_deps = [ ":$_info_plist_target" ] - output_name = _output_name - arch_binary_target = ":$_arch_loadable_module_target" - arch_binary_output = _output_name - } + sources = get_target_outputs(":$_info_plist_target") + outputs = [ "{{bundle_contents_dir}}/Info.plist" ] + } - _xctest_bundle = _target_name + "_bundle" - create_signed_bundle(_target_name) { - forward_variables_from(invoker, - [ - "bundle_id", - "data_deps", - "enable_code_signing", - "product_type", - "xcode_test_application_name", - ]) + _xctest_bundle = _target_name + "_bundle" + create_signed_bundle(_target_name) { + forward_variables_from(invoker, + [ + "bundle_id", + "data_deps", + "bundle_deps_filter", + "enable_code_signing", + "product_type", + "xcode_test_application_name", + ]) - testonly = true - visibility = [ ":$_xctest_bundle" ] + testonly = true + visibility = [ ":$_xctest_bundle" ] - bundle_extension = ".xctest" + bundle_extension = ".xctest" - output_name = _output_name - bundle_binary_target = ":$_lipo_loadable_module_target" - bundle_binary_output = _output_name + output_name = _output_name + bundle_binary_target = ":$_loadable_module_target" + bundle_binary_output = _output_name + + xcode_extra_attributes = { + IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target + PRODUCT_BUNDLE_IDENTIFIER = _bundle_identifier + CODE_SIGNING_REQUIRED = "NO" + CODE_SIGNING_ALLOWED = "NO" + CODE_SIGN_IDENTITY = "" + DONT_GENERATE_INFOPLIST_FILE = "YES" + + # For XCUITest, Xcode requires specifying the host application name + # via the TEST_TARGET_NAME attribute. + if (invoker.product_type == _ios_xcode_xcuitest_bundle_id) { + TEST_TARGET_NAME = invoker.xcode_test_application_name + } - if (ios_set_attributes_for_xcode_project_generation) { - _xcode_product_bundle_id = - "$ios_app_bundle_id_prefix.gtest.$_output_name" - - _ios_provisioning_profile_info = - exec_script("//build/config/ios/codesign.py", - [ - "find-provisioning-profile", - "-b=" + _xcode_product_bundle_id, - ], - "json") - - xcode_extra_attributes = { - IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target - CODE_SIGN_IDENTITY = "iPhone Developer" - DEVELOPMENT_TEAM = _ios_provisioning_profile_info.team_identifier - PRODUCT_BUNDLE_IDENTIFIER = _xcode_product_bundle_id - PROVISIONING_PROFILE_SPECIFIER = _ios_provisioning_profile_info.name - - # For XCUITest, Xcode requires specifying the host application name - # via the TEST_TARGET_NAME attribute. 
- if (invoker.product_type == _ios_xcode_xcuitest_bundle_id) { - TEST_TARGET_NAME = invoker.xcode_test_application_name - } - - # For XCTest, Xcode requires specifying the host application path via - # both BUNDLE_LOADER and TEST_HOST attributes. - if (invoker.product_type == _ios_xcode_xctest_bundle_id) { - _xcode_app_name = invoker.xcode_test_application_name - if (defined(invoker.xcode_test_application_output_name)) { - _xcode_app_name = invoker.xcode_test_application_output_name - } - - BUNDLE_LOADER = "\$(TEST_HOST)" - TEST_HOST = "\$(BUILT_PRODUCTS_DIR)/" + - "${_xcode_app_name}.app/${_xcode_app_name}" - } + # For XCTest, Xcode requires specifying the host application path via + # both BUNDLE_LOADER and TEST_HOST attributes. + if (invoker.product_type == _ios_xcode_xctest_bundle_id) { + _xcode_app_name = invoker.xcode_test_application_name + if (defined(invoker.xcode_test_application_output_name)) { + _xcode_app_name = invoker.xcode_test_application_output_name } - } else { - not_needed(invoker, - [ - "xcode_test_application_name", - "xcode_test_application_output_name", - ]) - } - deps = [ ":$_info_plist_bundle" ] + BUNDLE_LOADER = "\$(TEST_HOST)" + TEST_HOST = "\$(BUILT_PRODUCTS_DIR)/" + + "${_xcode_app_name}.app/${_xcode_app_name}" + } } - bundle_data(_xctest_bundle) { - forward_variables_from(invoker, [ "host_target" ]) + deps = [ ":$_info_plist_bundle" ] + } + + bundle_data(_xctest_bundle) { + forward_variables_from(invoker, [ "host_target" ]) - testonly = true - visibility = [ ":$host_target" ] + testonly = true + visibility = [ ":$host_target" ] - public_deps = [ ":$_target_name" ] - sources = [ "$root_out_dir/$_output_name.xctest" ] - outputs = [ "{{bundle_contents_dir}}/PlugIns/$_output_name.xctest" ] - } + public_deps = [ ":$_target_name" ] + sources = [ "$root_out_dir/$_output_name.xctest" ] + outputs = [ "{{bundle_contents_dir}}/PlugIns/$_output_name.xctest" ] } } @@ -1946,13 +1616,27 @@ template("ios_xctest_test") { "$ios_sdk_platform_path/Developer/usr/lib/libXCTestBundleInject.dylib", ] + # Xcode 13 now depends on XCTestCore. To keep things future proof, copy over + # everything that Xcode copies. + if (xcode_version_int >= 1300) { + extra_system_frameworks += [ + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTestCore.framework", + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCUIAutomation.framework", + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCUnit.framework", + "$ios_sdk_platform_path/Developer/usr/lib/libXCTestSwiftSupport.dylib", + ] + } + + # XCTestSupport framework is required as of Xcode 14.3 or later. + if (xcode_version_int >= 1430) { + extra_system_frameworks += [ "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTestSupport.framework" ] + } + _xctest_bundle = _xctest_target + "_bundle" - if (!is_fat_secondary_toolchain) { - if (!defined(bundle_deps)) { - bundle_deps = [] - } - bundle_deps += [ ":$_xctest_bundle" ] + if (!defined(bundle_deps)) { + bundle_deps = [] } + bundle_deps += [ ":$_xctest_bundle" ] } } @@ -1987,6 +1671,10 @@ template("ios_xcuitest_test_runner_bundle") { _output_name = invoker.output_name } + # Bundle identifier should respect rfc1034, so replace "_" with "-". + _bundle_identifier = "$ios_app_bundle_id_prefix.chrome." 
+ + string_replace(_output_name, "_", "-") + _xctrunner_path = "$ios_sdk_platform_path/Developer/Library/Xcode/Agents/XCTRunner.app" @@ -2016,7 +1704,7 @@ template("ios_xcuitest_test_runner_bundle") { "-o=" + rebase_path(_output_name, root_build_dir), ] + rebase_path(sources, root_build_dir) - if (use_system_xcode && use_goma) { + if (use_system_xcode && (use_goma || use_remoteexec)) { deps = [ "//build/config/ios:copy_xctrunner_app" ] } } @@ -2027,6 +1715,7 @@ template("ios_xcuitest_test_runner_bundle") { executable_name = _output_name info_plist_target = ":$_info_plist_merge_plist" + extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ] } bundle_data(_info_plist_bundle) { @@ -2048,7 +1737,7 @@ template("ios_xcuitest_test_runner_bundle") { outputs = [ "{{bundle_contents_dir}}/PkgInfo" ] - if (use_system_xcode && use_goma) { + if (use_system_xcode && (use_goma || use_remoteexec)) { public_deps = [ "//build/config/ios:copy_xctrunner_app" ] } } @@ -2072,6 +1761,22 @@ template("ios_xcuitest_test_runner_bundle") { "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework", ] + # Xcode 13 now depends on XCTestCore. To keep things future proof, copy over + # everything that Xcode copies. + if (xcode_version_int >= 1300) { + extra_system_frameworks += [ + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTestCore.framework", + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCUIAutomation.framework", + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCUnit.framework", + "$ios_sdk_platform_path/Developer/usr/lib/libXCTestSwiftSupport.dylib", + ] + } + + # XCTestSupport framework is required as of Xcode 14.3 or later. + if (xcode_version_int >= 1430) { + extra_system_frameworks += [ "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTestSupport.framework" ] + } + bundle_deps = [] if (defined(invoker.bundle_deps)) { bundle_deps += invoker.bundle_deps @@ -2096,6 +1801,12 @@ template("ios_xcuitest_test_runner_bundle") { # xcode_test_application_name: # string, name of the test application for the ui test target. # +# runner_only_bundle_deps: +# list of labels of bundle target to include in the runner and +# exclude from the test module (the use case is a framework bundle +# that is used by the test module and thus needs to be packaged in +# the runner application bundle) +# # This template defines two targets, one named "${target_name}_module" is the # xctest dynamic library, and the other named "${target_name}_runner" is the # test runner application bundle. @@ -2133,6 +1844,10 @@ template("ios_xcuitest_test") { output_name = _xcuitest_module_output deps = invoker.deps + + if (defined(invoker.runner_only_bundle_deps)) { + bundle_deps_filter = invoker.runner_only_bundle_deps + } } _xcuitest_runner_output = _xcuitest_target + "-Runner" @@ -2140,6 +1855,13 @@ template("ios_xcuitest_test") { output_name = _xcuitest_runner_output xctest_bundle = _xcuitest_module_target + "_bundle" forward_variables_from(invoker, [ "bundle_deps" ]) + + if (defined(invoker.runner_only_bundle_deps)) { + if (!defined(bundle_deps)) { + bundle_deps = [] + } + bundle_deps += invoker.runner_only_bundle_deps + } } } diff --git a/build/config/ios/strip_arm64e.py b/build/config/ios/strip_arm64e.py index f21baf423db7..56e684fd58c5 100644 --- a/build/config/ios/strip_arm64e.py +++ b/build/config/ios/strip_arm64e.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. 
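# NOTE (editor's sketch, not part of the patch): a minimal, hypothetical usage
# of the `ios_xcuitest_test` template documented above. All labels and names
# below are invented for illustration; `runner_only_bundle_deps` packages the
# listed bundle into the runner app while `bundle_deps_filter` keeps it out of
# the test module.
#
#   ios_xcuitest_test("my_xcuitests") {
#     xcode_test_application_name = "MyHostApp"
#     deps = [ ":my_xcuitests_sources" ]
#     bundle_deps = [ ":shared_resources_bundle" ]
#     runner_only_bundle_deps = [ ":helper_framework_bundle" ]
#   }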
+# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Strip arm64e architecture from a binary if present.""" diff --git a/build/config/ios/swift_source_set.gni b/build/config/ios/swift_source_set.gni new file mode 100644 index 000000000000..0f5cc0764fb2 --- /dev/null +++ b/build/config/ios/swift_source_set.gni @@ -0,0 +1,25 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Defines a template for Swift source files. The default module_name +# of the target is the entire target label (without the leading //) +# with all "/" and ":" replaced with "_". +template("swift_source_set") { + _target_name = target_name + source_set(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + if (!defined(module_name)) { + _target_label = get_label_info(":$_target_name", "label_no_toolchain") + + # Strip the // from the beginning of the label. + _target_label = string_replace(_target_label, "//", "", 1) + module_name = + string_replace(string_replace(_target_label, "/", "_"), ":", "_") + } + } +} +set_defaults("swift_source_set") { + configs = default_compiler_configs +} diff --git a/build/config/ios/write_framework_hmap.py b/build/config/ios/write_framework_hmap.py index ac467ee92ae6..88892534981a 100644 --- a/build/config/ios/write_framework_hmap.py +++ b/build/config/ios/write_framework_hmap.py @@ -1,8 +1,7 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import os import struct diff --git a/build/config/ios/write_framework_modulemap.py b/build/config/ios/write_framework_modulemap.py index dcc88a8ea695..49f3263d315b 100644 --- a/build/config/ios/write_framework_modulemap.py +++ b/build/config/ios/write_framework_modulemap.py @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/ios/xctest_shell.mm b/build/config/ios/xctest_shell.mm index dcf5bad5e7cc..0fd5ccaf914d 100644 --- a/build/config/ios/xctest_shell.mm +++ b/build/config/ios/xctest_shell.mm @@ -1,4 +1,4 @@ -// Copyright 2016 The Chromium Authors. All rights reserved. +// Copyright 2016 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/config/linux/BUILD.gn b/build/config/linux/BUILD.gn index 47704248bfae..131bb71d1d3b 100644 --- a/build/config/linux/BUILD.gn +++ b/build/config/linux/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -15,6 +15,18 @@ group("linux") { # is applied to all targets. It is here to separate out the logic that is # Linux-only. This is not applied to Android, but is applied to ChromeOS. 
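# NOTE (editor's sketch, not part of the patch): the hunk below wires the
# AArch64 branch-protection GN arg into compile flags. Assuming an args.gn
# such as
#
#   target_cpu = "arm64"
#   arm_control_flow_integrity = "standard"
#
# the config emits "-mbranch-protection=standard" (PAC return-address signing
# plus BTI landing pads) for both cflags and asmflags, while the "pac" value
# maps to the PAC-only "-mbranch-protection=pac-ret".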
config("compiler") { + if (current_cpu == "arm64") { + import("//build/config/arm.gni") + cflags = [] + asmflags = [] + if (arm_control_flow_integrity == "standard") { + cflags += [ "-mbranch-protection=standard" ] + asmflags += [ "-mbranch-protection=standard" ] + } else if (arm_control_flow_integrity == "pac") { + cflags += [ "-mbranch-protection=pac-ret" ] + asmflags += [ "-mbranch-protection=pac-ret" ] + } + } } # This is included by reference in the //build/config/compiler:runtime_library @@ -28,8 +40,7 @@ config("runtime_library") { defines = [ "OS_CHROMEOS" ] } - if ((!(is_chromeos_ash || is_chromeos_lacros) || - default_toolchain != "//build/toolchain/cros:target") && + if ((!is_chromeos || default_toolchain != "//build/toolchain/cros:target") && (!use_custom_libcxx || current_cpu == "mipsel")) { libs = [ "atomic" ] } @@ -52,17 +63,8 @@ if (use_glib) { "gthread-2.0", ] defines = [ - "GLIB_VERSION_MAX_ALLOWED=GLIB_VERSION_2_40", - "GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_40", + "GLIB_VERSION_MAX_ALLOWED=GLIB_VERSION_2_56", + "GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_56", ] } } - -# Ensures all exported symbols are added to the dynamic symbol table. This is -# necessary to expose Chrome's custom operator new() and operator delete() (and -# other memory-related symbols) to libraries. Otherwise, they might -# (de)allocate memory on a different heap, which would spell trouble if pointers -# to heap-allocated memory are passed over shared library boundaries. -config("export_dynamic") { - ldflags = [ "-rdynamic" ] -} diff --git a/build/config/linux/atk/BUILD.gn b/build/config/linux/atk/BUILD.gn index bc8e27894732..239c3870a149 100644 --- a/build/config/linux/atk/BUILD.gn +++ b/build/config/linux/atk/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,9 +7,8 @@ import("//build/config/features.gni") import("//build/config/linux/pkg_config.gni") import("//build/config/ui.gni") -# TODO(crbug.com/1171629): Change this back to is_chromeos. # CrOS doesn't install GTK or any gnome packages. -assert(!is_chromeos_ash) +assert(!is_chromeos) # These packages should _only_ be expected when building for a target. assert(current_toolchain == default_toolchain) @@ -24,7 +23,7 @@ pkg_config("atk") { "atk-bridge-2.0", ] atk_lib_dir = exec_script(pkg_config_script, - pkg_config_args + [ + common_pkg_config_args + pkg_config_args + [ "--libdir", "atk", ], diff --git a/build/config/linux/atspi2/BUILD.gn b/build/config/linux/atspi2/BUILD.gn index 988a99568136..51b6d33aab3c 100644 --- a/build/config/linux/atspi2/BUILD.gn +++ b/build/config/linux/atspi2/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -12,7 +12,7 @@ if (use_atk) { pkg_config("atspi2") { packages = [ "atspi-2" ] atspi_version = exec_script(pkg_config_script, - pkg_config_args + [ + common_pkg_config_args + pkg_config_args + [ "atspi-2", "--version-as-components", ], diff --git a/build/config/linux/dbus/BUILD.gn b/build/config/linux/dbus/BUILD.gn index f11cf7101cb6..2414c3416737 100644 --- a/build/config/linux/dbus/BUILD.gn +++ b/build/config/linux/dbus/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. 
+# Copyright 2016 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/build/config/linux/dri/BUILD.gn b/build/config/linux/dri/BUILD.gn
index 8e3efe670382..e3a0a83a99fe 100644
--- a/build/config/linux/dri/BUILD.gn
+++ b/build/config/linux/dri/BUILD.gn
@@ -1,15 +1,15 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
+# Copyright 2017 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import("//build/config/linux/pkg_config.gni")
 
-assert(is_linux || is_chromeos, "This file should only be referenced on Linux")
+assert(is_linux, "This file should only be referenced on Linux")
 
 pkg_config("dri") {
   packages = [ "dri" ]
   dri_driver_dir = exec_script(pkg_config_script,
-                               pkg_config_args + [
+                               common_pkg_config_args + pkg_config_args + [
                                  "--dridriverdir",
                                  "dri",
                                ],
diff --git a/build/config/linux/gtk/BUILD.gn b/build/config/linux/gtk/BUILD.gn
index ecf95dda763b..355067ea178b 100644
--- a/build/config/linux/gtk/BUILD.gn
+++ b/build/config/linux/gtk/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -7,12 +7,6 @@ import("//build/config/linux/pkg_config.gni")
 
 assert(is_linux, "This file should only be referenced on Linux")
 
-declare_args() {
-  # The (major) version of GTK to build against. A different version may be
-  # loaded at runtime.
-  gtk_version = 3
-}
-
 # GN doesn't check visibility for configs so we give this an obviously internal
 # name to discourage random targets from accidentally depending on this and
 # bypassing the GTK target's visibility.
@@ -33,36 +27,19 @@ pkg_config("gtk_internal_config") {
 group("gtk") {
   visibility = [
-    # This is the only target that can depend on GTK. Do not add more targets
-    # to this list.
-    "//ui/gtk:gtk_stubs",
-
-    # These are allow-listed for WebRTC builds.
+    # These are allow-listed for WebRTC builds. Nothing else should depend
+    # on GTK.
     "//examples:peerconnection_client",
     "//remoting/host:common",
     "//remoting/host:remoting_me2me_host_static",
     "//remoting/host/file_transfer",
     "//remoting/host/it2me:common",
-    "//remoting/host/it2me:remote_assistance_host",
+    "//remoting/host/it2me:main",
     "//remoting/host/linux",
+    "//remoting/host/remote_open_url:common",
     "//remoting/test:it2me_standalone_host_main",
     "//webrtc/examples:peerconnection_client",
   ]
 
   public_configs = [ ":gtk_internal_config" ]
 }
-
-# Depend on "gtkprint" to get this.
-pkg_config("gtkprint_internal_config") {
-  if (gtk_version == 3) {
-    packages = [ "gtk+-unix-print-3.0" ]
-  } else {
-    assert(gtk_version == 4)
-    packages = [ "gtk4-unix-print" ]
-  }
-}
-
-group("gtkprint") {
-  visibility = [ "//ui/gtk:*" ]
-  public_configs = [ ":gtkprint_internal_config" ]
-}
diff --git a/build/config/linux/gtk/gtk.gni b/build/config/linux/gtk/gtk.gni
index 1e45248e6772..9e6131d4c0a2 100644
--- a/build/config/linux/gtk/gtk.gni
+++ b/build/config/linux/gtk/gtk.gni
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -6,5 +6,9 @@ import("//build/config/ui.gni")
 
 declare_args() {
   # Whether or not we should use libgtk.
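# NOTE (editor's sketch, not part of the patch): with this change the GTK
# major version becomes a GN arg declared next to use_gtk just below, so a
# build against GTK 4 would be configured in args.gn roughly as:
#
#   use_gtk = true
#   gtk_version = 4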
- use_gtk = is_linux && !is_chromecast + use_gtk = is_linux && !is_castos + + # The (major) version of GTK to build against. A different version may be + # loaded at runtime. + gtk_version = 3 } diff --git a/build/config/linux/libdrm/BUILD.gn b/build/config/linux/libdrm/BUILD.gn index e9b40184ec68..31ab0d8da212 100644 --- a/build/config/linux/libdrm/BUILD.gn +++ b/build/config/linux/libdrm/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/chromecast_build.gni") @@ -11,7 +11,7 @@ declare_args() { # Controls whether the build should use the version of libdrm library shipped # with the system. In release builds of desktop Linux and Chrome OS we use the # system version. Some Chromecast devices use this as well. - use_system_libdrm = is_chromeos_device || (is_linux && !is_chromecast) + use_system_libdrm = is_chromeos_device || (is_linux && !is_castos) } if (use_system_libdrm) { diff --git a/build/config/linux/libffi/BUILD.gn b/build/config/linux/libffi/BUILD.gn index 59b7f040c8af..771170c3e870 100644 --- a/build/config/linux/libffi/BUILD.gn +++ b/build/config/linux/libffi/BUILD.gn @@ -1,16 +1,24 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/linux/pkg_config.gni") -if (default_toolchain == "//build/toolchain/cros:target") { +declare_args() { + # Controls whether the build should use the version of libffi library shipped + # with the system. By default, we only use the system version on Chrome OS: + # on Linux, libffi must be statically linked to prevent a situation where the + # runtime version of libffi is different from the build-time version from the + # sysroot. + use_system_libffi = default_toolchain == "//build/toolchain/cros:target" +} + +if (use_system_libffi) { pkg_config("libffi") { packages = [ "libffi" ] } } else { - # On Linux, make sure we link against libffi version 6. config("libffi") { - libs = [ ":libffi.so.6" ] + libs = [ ":libffi_pic.a" ] } } diff --git a/build/config/linux/libva/BUILD.gn b/build/config/linux/libva/BUILD.gn index ada5d665fb82..380da0435af2 100644 --- a/build/config/linux/libva/BUILD.gn +++ b/build/config/linux/libva/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/linux/nss/BUILD.gn b/build/config/linux/nss/BUILD.gn index 8c27938b765a..c67cefc148b0 100644 --- a/build/config/linux/nss/BUILD.gn +++ b/build/config/linux/nss/BUILD.gn @@ -1,18 +1,14 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/linux/pkg_config.gni") if (is_linux || is_chromeos) { - # This is a dependency on NSS with no libssl. On Linux we use a built-in SSL - # library but the system NSS libraries. Non-Linux platforms using NSS use the - # hermetic one in //third_party/nss. 
- # - # Generally you should depend on //crypto:platform instead of using this - # config since that will properly pick up NSS or OpenSSL depending on - # platform and build config. - pkg_config("system_nss_no_ssl_config") { + # This is a dependency on NSS with no libssl3. On Linux and Chrome OS, we use + # NSS for platform certificate integration. We use our own TLS library, so + # exclude the one from NSS. + pkg_config("nss") { packages = [ "nss" ] extra_args = [ "-v", diff --git a/build/config/linux/pangocairo/BUILD.gn b/build/config/linux/pangocairo/BUILD.gn index ddcc754bbdd0..e2030b817304 100644 --- a/build/config/linux/pangocairo/BUILD.gn +++ b/build/config/linux/pangocairo/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/linux/pangocairo/pangocairo.gni b/build/config/linux/pangocairo/pangocairo.gni index 6bc75294cdf8..c7662ac33356 100644 --- a/build/config/linux/pangocairo/pangocairo.gni +++ b/build/config/linux/pangocairo/pangocairo.gni @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -6,5 +6,5 @@ import("//build/config/chromeos/ui_mode.gni") import("//build/config/ui.gni") declare_args() { - use_pangocairo = is_linux && !is_chromecast + use_pangocairo = is_linux && !is_castos } diff --git a/build/config/linux/pkg-config.py b/build/config/linux/pkg-config.py index 5adf70cc3bb3..2e38c7ffbd10 100755 --- a/build/config/linux/pkg-config.py +++ b/build/config/linux/pkg-config.py @@ -1,9 +1,8 @@ -#!/usr/bin/env python -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import json import os diff --git a/build/config/linux/pkg_config.gni b/build/config/linux/pkg_config.gni index 428e44ac0a03..cb9b4600331f 100644 --- a/build/config/linux/pkg_config.gni +++ b/build/config/linux/pkg_config.gni @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -53,9 +53,10 @@ pkg_config_script = "//build/config/linux/pkg-config.py" # need to invoke it manually. pkg_config_args = [] +common_pkg_config_args = [] if (sysroot != "") { # Pass the sysroot if we're using one (it requires the CPU arch also). 
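# NOTE (editor's sketch, not part of the patch): the hunk below factors the
# sysroot flags out of pkg_config_args into a shared common_pkg_config_args
# list that both the host and target branches of the pkg_config() template
# prepend. For a target build the composed invocation ends up as (the sysroot
# path in the example is hypothetical):
#
#   args = common_pkg_config_args + pkg_config_args + invoker.packages
#   # e.g. [ "-s", "../../build/linux/sysroot", "-a", "x64", "glib-2.0" ]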
- pkg_config_args += [ + common_pkg_config_args += [ "-s", rebase_path(sysroot), "-a", @@ -92,9 +93,9 @@ template("pkg_config") { "Variable |packages| must be defined to be a list in pkg_config.") config(target_name) { if (host_toolchain == current_toolchain) { - args = host_pkg_config_args + invoker.packages + args = common_pkg_config_args + host_pkg_config_args + invoker.packages } else { - args = pkg_config_args + invoker.packages + args = common_pkg_config_args + pkg_config_args + invoker.packages } if (defined(invoker.extra_args)) { args += invoker.extra_args diff --git a/build/config/locales.gni b/build/config/locales.gni index e94e1626db72..ed26f3de9bed 100644 --- a/build/config/locales.gni +++ b/build/config/locales.gni @@ -1,157 +1,136 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/chromeos/ui_mode.gni") -# This file creates the |locales| which is the set of current -# locales based on the current platform. Locales in this list are formated -# based on what .pak files expect. -# The |locales| variable *may* contain pseudolocales, depending on the -# |enable_pseudolocales| flag. +# This file creates |platform_pak_locales| which is the set of packed locales +# based on the current platform. Locales in this list are formatted based on +# what .pak files expect. The |platform_pak_locales| variable *may* contain +# pseudolocales, depending on the |enable_pseudolocales| flag. # If you specifically want to have the locales variable with or without # pseudolocales, then use |locales_with_pseudolocales| or # |locales_without_pseudolocales|. # The following additional platform specific lists are created: -# - |android_apk_locales| subset for Android based apk builds +# - |extended_locales| list of locales not shipped on desktop builds # - |android_bundle_locales_as_resources| locales formatted for XML output names -# - |locales_as_mac_outputs| formated for mac output bundles -# - |ios_packed_locales| subset for iOS -# - |ios_packed_locales_as_mac_outputs| subset for iOS output +# - |locales_as_apple_outputs| formatted for mac output bundles -# Android doesn't ship all locales in order to save space (but webview does). -# http://crbug.com/369218 -android_apk_omitted_locales = [ - "bn", - "et", - "gu", - "kn", - "ml", - "mr", - "ms", - "ta", - "te", -] - -# Chrome on iOS only ships with a subset of the locales supported by other -# version of Chrome as the corresponding locales are not supported by the -# operating system (but for simplicity, the corresponding .pak files are -# still generated). -ios_unsupported_locales = [ - "am", - "bn", - "et", - "fil", - "gu", - "kn", - "lv", - "ml", - "mr", - "sl", - "sw", - "ta", - "te", +pseudolocales = [ + "ar-XB", + "en-XA", ] -# These list are defined even when not building for Android or iOS for the -# sake of build/locale_tool.py. Ensure that GN doesn't complain about them -# being unused. -not_needed([ "android_apk_omitted_locales" ]) -not_needed([ "ios_unsupported_locales" ]) - # Superset of all locales used in Chrome with platform specific changes noted. 
-all_chrome_locales = [
-  "af",
-  "am",
-  "ar",
-  "as",
-  "az",
-  "be",
-  "bg",
-  "bn",
-  "bs",
-  "ca",
-  "cs",
-  "da",
-  "de",
-  "el",
-  "en-GB",
-  "en-US",
-  "es",
-  "es-419",  # "es-MX" in iOS (Mexico vs Latin America) "es-US" on Android
-  "et",
-  "eu",
-  "fa",
-  "fi",
-  "fil",  # "tl" in .xml but "fil" in TC and .pak
-  "fr",
-  "fr-CA",
-  "gl",
-  "gu",
-  "he",  # "iw" in .xml and TC but "he" in .pak
-  "hi",
-  "hr",
-  "hu",
-  "hy",
-  "id",  # "in" in .xml but "id" in TC and .pak
-  "is",
-  "it",
-  "ja",
-  "ka",
-  "kk",
-  "km",
-  "kn",
-  "ko",
-  "ky",
-  "lo",
-  "lt",
-  "lv",
-  "mk",
-  "ml",
-  "mn",
-  "mr",
-  "ms",
-  "my",
-  "nb",  # "no" in TC but "nb" in .xml and .pak
-  "ne",
-  "nl",
-  "or",
-  "pa",
-  "pl",
-  "pt-BR",  # just "pt" in iOS
-  "pt-PT",
-  "ro",
-  "ru",
-  "si",
-  "sk",
-  "sl",
-  "sq",
-  "sr",
-  "sr-Latn",  # -b+sr+Latn in .xml
-  "sv",
-  "sw",
-  "ta",
-  "te",
-  "th",
-  "tr",
-  "uk",
-  "ur",
-  "uz",
-  "vi",
-  "zh-CN",
-  "zh-HK",
-  "zh-TW",
-  "zu",
-]
+all_chrome_locales =
+    [
+      "af",
+      "am",
+      "ar",
+      "as",
+      "az",
+      "be",
+      "bg",
+      "bn",
+      "bs",
+      "ca",
+      "cs",
+      "cy",
+      "da",
+      "de",
+      "el",
+      "en-GB",
+      "en-US",
+      "es",
+      "es-419",  # "es-MX" in iOS (Mexico vs Latin America) "es-US" on Android
+      "et",
+      "eu",
+      "fa",
+      "fi",
+      "fil",  # "tl" in .xml but "fil" in TC and .pak
+      "fr",
+      "fr-CA",
+      "gl",
+      "gu",
+      "he",  # "iw" in .xml and TC but "he" in .pak
+      "hi",
+      "hr",
+      "hu",
+      "hy",
+      "id",  # "in" in .xml but "id" in TC and .pak
+      "is",
+      "it",
+      "ja",
+      "ka",
+      "kk",
+      "km",
+      "kn",
+      "ko",
+      "ky",
+      "lo",
+      "lt",
+      "lv",
+      "mk",
+      "ml",
+      "mn",
+      "mr",
+      "ms",
+      "my",
+      "nb",  # "no" in TC but "nb" in .xml and .pak
+      "ne",
+      "nl",
+      "or",
+      "pa",
+      "pl",
+      "pt-BR",  # just "pt" in iOS
+      "pt-PT",
+      "ro",
+      "ru",
+      "si",
+      "sk",
+      "sl",
+      "sq",
+      "sr",
+      "sr-Latn",  # -b+sr+Latn in .xml
+      "sv",
+      "sw",
+      "ta",
+      "te",
+      "th",
+      "tr",
+      "uk",
+      "ur",
+      "uz",
+      "vi",
+      "zh-CN",
+      "zh-HK",
+      "zh-TW",
+      "zu",
+    ] + pseudolocales
 
-# New locales added to Chrome Android bundle builds.
-android_bundle_only_locales = [
-  "af",
+if (is_ios) {
+  # Chrome on iOS uses "es-MX" and "pt" for "es-419" and "pt-BR".
+  all_chrome_locales -= [
+    "es-419",
+    "pt-BR",
+  ]
+  all_chrome_locales += [
+    "es-MX",
+    "pt",
+  ]
+}
+
+# Chrome locales not on Windows, Mac, or Linux.
+# This list is used for all platforms except Android. On Android, this list is
+# modified to exclude locales that are not used on Android, so
+# `platform_pak_locales - extended_locales` works as expected.
+extended_locales = [
   "as",
   "az",
   "be",
   "bs",
+  "cy",
   "eu",
   "fr-CA",
   "gl",
@@ -171,28 +150,66 @@ android_bundle_only_locales = [
   "si",
   "sq",
   "sr-Latn",
-  "ur",
   "uz",
   "zh-HK",
   "zu",
 ]
 
-# New locales added to ChromeOS builds.
-chromeos_only_locales = [ "is" ]
+# Chrome locales not on Android.
+# These locales have not been tested yet. Specifically, AOSP has not been
+# translated to Welsh at the time of writing (April 2022):
+# https://cs.android.com/android/platform/superproject/+/master:build/make/target/product/languages_default.mk
+# Due to this, the only way a user could see Welsh strings - assuming they were
+# built - would be to manually switch their "Chrome language" in Chrome's
+# language settings to Welsh, so Welsh usage would probably be very low.
+_non_android_locales = [ "cy" ]
 
+# Setup |platform_pak_locales| for each platform.
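# NOTE (editor's sketch, not part of the patch): a short trace of the set
# arithmetic below, using values from the lists above. On desktop Linux,
#
#   platform_pak_locales = all_chrome_locales   # includes "cy", "fr-CA", ...
#   platform_pak_locales -= extended_locales    # drops "cy", "fr-CA", ...
#
# while on Android both lists first subtract _non_android_locales ("cy"), so
# `platform_pak_locales - extended_locales` stays well defined there too.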
+platform_pak_locales = all_chrome_locales if (is_android) { - locales = all_chrome_locales + platform_pak_locales -= _non_android_locales + extended_locales -= _non_android_locales +} else { + platform_pak_locales -= extended_locales +} - # Android doesn't ship all locales on KitKat in order to save space - # (but webview does). http://crbug.com/369218 - android_apk_locales = all_chrome_locales - android_bundle_only_locales - - android_apk_omitted_locales +# The base list for all platforms except Android excludes the extended locales. +# Add or subtract platform specific locales below. +if (is_chromeos) { + platform_pak_locales += [ + "cy", + "eu", + "gl", + "is", + "zu", + ] + platform_pak_locales -= [ "ur" ] +} else if (is_ios) { + platform_pak_locales -= [ + "af", + "am", + "bn", + "et", + "fil", + "gu", + "kn", + "lv", + "ml", + "mr", + "sl", + "sw", + "ta", + "te", + "ur", + ] +} - # List for Android locale names in .xml exports. Note: needs to stay in sync - # with |ToAndroidLocaleName| in build/android/gyp/util/resource_utils.py. +# List for Android locale names in .xml exports. Note: needs to stay in sync +# with |ToAndroidLocaleName| in build/android/gyp/util/resource_utils.py. +if (is_android) { # - add r: (e.g. zh-HK -> zh-rHK ) android_bundle_locales_as_resources = [] - foreach(_locale, locales) { + foreach(_locale, platform_pak_locales) { android_bundle_locales_as_resources += [ string_replace(_locale, "-", "-r") ] } @@ -215,58 +232,30 @@ if (is_android) { "iw", "tl", ] -} else if (is_chromeos_ash || is_chromeos_lacros) { - # In ChromeOS we support a few more locales than standard Chrome. - locales = - all_chrome_locales - android_bundle_only_locales + chromeos_only_locales -} else { - # Change if other platforms support more locales. - locales = all_chrome_locales - android_bundle_only_locales } -# Chrome on iOS uses different names for "es-419" and "pt-BR" (called -# respectively "es-MX" and "pt" on iOS). -if (is_ios) { - locales -= [ - "es-419", - "pt-BR", - ] - locales += [ - "es-MX", - "pt", - ] -} - -pseudolocales = [ - "ar-XB", - "en-XA", -] -locales_without_pseudolocales = locales -locales_with_pseudolocales = locales + pseudolocales +locales_without_pseudolocales = platform_pak_locales - pseudolocales +locales_with_pseudolocales = platform_pak_locales declare_args() { # We want to give pseudolocales to everyone except end-users (devs & QA). + # Note that this only packages the locales in, and doesn't add the ui to enable them. enable_pseudolocales = !is_official_build } -if (enable_pseudolocales) { - # Note that this only packages the locales in, and doesn't add the ui to enable them. - locales += pseudolocales +if (!enable_pseudolocales) { + platform_pak_locales -= pseudolocales } -# Same as the locales list but in the format Mac expects for output files: -# it uses underscores instead of hyphens, and "en" instead of "en-US". -locales_as_mac_outputs = [] -foreach(locale, locales) { - if (locale == "en-US") { - locales_as_mac_outputs += [ "en" ] - } else { - locales_as_mac_outputs += [ string_replace(locale, "-", "_") ] +if (is_apple) { + # Same as the locales list but in the format Mac expects for output files: + # it uses underscores instead of hyphens, and "en" instead of "en-US". 
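# NOTE (editor's illustration, not part of the patch): the foreach below
# produces mappings such as "en-US" -> "en", "zh-TW" -> "zh_TW", and
# "pt-PT" -> "pt_PT".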
+  locales_as_apple_outputs = []
+  foreach(locale, platform_pak_locales) {
+    if (locale == "en-US") {
+      locales_as_apple_outputs += [ "en" ]
+    } else {
+      locales_as_apple_outputs += [ string_replace(locale, "-", "_") ]
+    }
   }
 }
-
-if (is_ios) {
-  ios_packed_locales = locales - ios_unsupported_locales
-  ios_packed_locales_as_mac_outputs =
-      locales_as_mac_outputs - ios_unsupported_locales
-}
diff --git a/build/config/logging.gni b/build/config/logging.gni
index 25fe9913437c..a08195b7ddfb 100644
--- a/build/config/logging.gni
+++ b/build/config/logging.gni
@@ -1,7 +1,8 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import("//build/buildflag_header.gni")
 import("//build/config/chromeos/ui_mode.gni")
 import("//build/config/dcheck_always_on.gni")
 
@@ -9,4 +10,23 @@ declare_args() {
   # Use LogErrorNotReached() for NOTREACHED().
   enable_log_error_not_reached =
       is_chromeos_ash && !(is_debug || dcheck_always_on)
+  enable_stack_trace_line_numbers = false
+
+  # Use runtime vlog everywhere except for ash-chrome.
+  # When `use_runtime_vlog` is true,
+  #   command line switch `--vmodule=xxx` or `--v=x` could be used to
+  #   control vlog level at runtime.
+  # When `use_runtime_vlog` is false,
+  #   verbose log level is controlled by the `ENABLE_VLOG_LEVEL` macro. VLOG(n)
+  #   is kept and generates output if `n` is less than or equal to the vlog
+  #   level defined by the macro.
+  #   Command line switch `--vmodule=xxx`, or `--v=x` would have no effect.
+  #
+  # Runtime vlog is used everywhere except on ash-chrome.
+  # Ash-chrome has a few vmodule patterns that need to be used indefinitely
+  # to investigate problems from logs in feedback reports. These vmodule
+  # patterns use too many CPU cycles (see http://crbug/489441). Turning off
+  # runtime vlog and using build time vlog avoids paying that CPU tax and has
+  # a nice side effect of a smaller production binary.
+  use_runtime_vlog = !is_chromeos_ash
 }
diff --git a/build/config/mac/BUILD.gn b/build/config/mac/BUILD.gn
index 0919208ec575..7af3124d4eba 100644
--- a/build/config/mac/BUILD.gn
+++ b/build/config/mac/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -6,6 +6,8 @@ import("//build/config/apple/symbols.gni")
 import("//build/config/c++/c++.gni")
 import("//build/config/mac/mac_sdk.gni")
 import("//build/config/sysroot.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
 
 # This is included by reference in the //build/config/compiler config that
 # is applied to all targets. It is here to separate out the logic.
@@ -63,18 +65,12 @@ config("runtime_library") {
   common_flags = [
     "-isysroot",
     rebase_path(sysroot, root_build_dir),
-    "-mmacosx-version-min=$mac_deployment_target",
+    "-mmacos-version-min=$mac_deployment_target",
  ]
 
   asmflags = common_flags
   cflags = common_flags
   ldflags = common_flags
-
-  # Prevent Mac OS X AssertMacros.h (included by system header) from defining
-  # macros that collide with common names, like 'check', 'require', and
-  # 'verify'.
-  # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
-  defines = [ "__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES=0" ]
 }
 
 # On Mac, this is used for everything except static libraries.
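# NOTE (editor's sketch, not part of the patch): with use_runtime_vlog enabled,
# verbosity is selected at run time via the switches named in the logging.gni
# comment above, e.g.
#
#   chrome --v=1 --vmodule=drm*=2
#
# whereas with it disabled, VLOG(n) statements only produce output when n is
# at or below the build-time ENABLE_VLOG_LEVEL macro.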
@@ -90,18 +86,15 @@ config("mac_dynamic_flags") {
       # Binary.app/Contents/MacOS.
       "-Wl,-rpath,@loader_path/../../..",
     ]
-  }
-}
-
-# The ldflags referenced below are handled by
-# //build/toolchain/apple/linker_driver.py.
-# Remove this config if a target wishes to change the arguments passed to the
-# strip command during linking. This config by default strips all symbols
-# from a binary, but some targets may wish to specify an exports file to
-# preserve specific symbols.
-config("strip_all") {
-  if (enable_stripping) {
-    ldflags = [ "-Wcrl,strip,-x,-S" ]
+    # Path for loading shared libraries for unbundled binaries for
+    # the host toolchain (see https://crbug.com/1315433). Only used
+    # when building for iOS.
+    if (target_os == "ios" && current_toolchain == host_toolchain) {
+      ldflags += [ "-Wl,-rpath,@loader_path/" + rebase_path(
+                       get_label_info(":mac_dynamic_flags", "root_out_dir"),
+                       root_build_dir) ]
+    }
   }
 }
 
@@ -117,7 +110,8 @@ config("strip_all") {
 #
 # The symbolic link for $mac_sdk_path is set up by
 # //build/config/apple/sdk_info.py in //build/config/mac/mac_sdk.gni.
-if (use_system_xcode && use_goma && target_os == "mac") {
+if (use_system_xcode && (use_goma || use_remoteexec) && target_os == "mac" &&
+    current_toolchain == default_toolchain) {
   action("sdk_inputs") {
     script = "//build/noop.py"
     outputs = [
@@ -128,5 +122,8 @@ if (use_system_xcode && use_goma && target_os == "mac") {
   }
 } else {
   group("sdk_inputs") {
+    if (current_toolchain != default_toolchain) {
+      public_deps = [ ":sdk_inputs($default_toolchain)" ]
+    }
   }
 }
diff --git a/build/config/mac/mac_sdk.gni b/build/config/mac/mac_sdk.gni
index 56d1bc34e789..db7c236d726f 100644
--- a/build/config/mac/mac_sdk.gni
+++ b/build/config/mac/mac_sdk.gni
@@ -1,4 +1,4 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,40 +8,44 @@ import("//build/config/gclient_args.gni")
 }
 import("//build/config/mac/mac_sdk_overrides.gni")
 import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
 import("//build/toolchain/toolchain.gni")
 
 assert(current_os == "mac" || current_toolchain == default_toolchain)
 
 declare_args() {
-  # The MACOSX_DEPLOYMENT_TARGET variable used when compiling. This partially
+  # The `MACOSX_DEPLOYMENT_TARGET` variable used when compiling. This partially
   # controls the minimum supported version of macOS for Chromium by
   # affecting the symbol availability rules. This may differ from
-  # mac_min_system_version when dropping support for older macOSes but where
+  # `mac_min_system_version` when dropping support for older macOSes but where
   # additional code changes are required to be compliant with the availability
   # rules.
-  # Must be of the form x.x.x for Info.plist files.
-  mac_deployment_target = "10.11.0"
+  mac_deployment_target = "10.13"
 
-  # The value of the LSMinimmumSystemVersion in Info.plist files. This partially
-  # controls the minimum supported version of macOS for Chromium by
-  # affecting the Info.plist. This may differ from mac_deployment_target when
-  # dropping support for older macOSes. This should be greater than or equal to
-  # the mac_deployment_target version.
-  # Must be of the form x.x.x for Info.plist files.
-  mac_min_system_version = "10.11.0"
+  # The value of the `LSMinimumSystemVersion` in `Info.plist` files. This
+  # partially controls the minimum supported version of macOS for Chromium by
+  # affecting the `Info.plist`.
This may differ from `mac_deployment_target` + # when dropping support for older macOSes. This must be greater than or equal + # to the `mac_deployment_target` version. + mac_min_system_version = "10.13" # Path to a specific version of the Mac SDK, not including a slash at the end. # If empty, the path to the lowest version greater than or equal to - # mac_sdk_min is used. + # `mac_sdk_min` is used. mac_sdk_path = "" - # The SDK name as accepted by xcodebuild. + # The SDK name as accepted by `xcodebuild`. mac_sdk_name = "macosx" # The SDK version used when making official builds. This is a single exact # version, not a minimum. If this version isn't available official builds # will fail. - mac_sdk_official_version = "11.1" + mac_sdk_official_version = "13.3" + + # The SDK build version used when making official builds. This is a single + # exact version found at "System/Library/CoreServices/SystemVersion.plist" + # inside the SDK. + mac_sdk_official_build_version = "21E226" # Production builds should use hermetic Xcode. If you want to do production # builds with system Xcode to test new SDKs, set this. @@ -82,11 +86,13 @@ if (!use_system_xcode) { # Goma RBE requires paths relative to source directory. When using system # Xcode, this is done by creating symbolic links in root_build_dir. -if (use_system_xcode && use_goma) { +if (use_system_xcode && (use_goma || use_remoteexec)) { sdk_info_args += [ "--get_sdk_info", "--create_symlink_at", "sdk/xcode_links", + "--root_build_dir", + root_build_dir, ] } sdk_info_args += [ mac_sdk_name ] @@ -94,7 +100,7 @@ sdk_info_args += [ mac_sdk_name ] _mac_sdk_result = exec_script(script_name, sdk_info_args, "scope") xcode_version = _mac_sdk_result.xcode_version xcode_build = _mac_sdk_result.xcode_build -if (mac_sdk_path == "" && use_system_xcode && use_goma) { +if (mac_sdk_path == "" && use_system_xcode && (use_goma || use_remoteexec)) { mac_sdk_path = _mac_sdk_result.sdk_path } @@ -104,11 +110,13 @@ if (use_system_xcode) { find_sdk_args = [ "--print_sdk_path", "--print_bin_path", + "--print_sdk_build", mac_sdk_min, ] find_sdk_lines = exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines") - mac_sdk_version = find_sdk_lines[2] + mac_sdk_version = find_sdk_lines[3] + mac_sdk_build_version = find_sdk_lines[2] if (mac_sdk_path == "") { mac_sdk_path = find_sdk_lines[0] mac_bin_path = find_sdk_lines[1] @@ -117,6 +125,7 @@ if (use_system_xcode) { } } else { mac_sdk_version = mac_sdk_official_version + mac_sdk_build_version = mac_sdk_official_build_version _dev = _hermetic_xcode_path + "/Contents/Developer" _sdk = "MacOSX${mac_sdk_version}.sdk" mac_sdk_path = _dev + "/Platforms/MacOSX.platform/Developer/SDKs/$_sdk" diff --git a/build/config/mac/mac_sdk_overrides.gni b/build/config/mac/mac_sdk_overrides.gni index 17eb3d9f6236..de58f3f72c8b 100644 --- a/build/config/mac/mac_sdk_overrides.gni +++ b/build/config/mac/mac_sdk_overrides.gni @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/mac/package_framework.py b/build/config/mac/package_framework.py index 0026f466b462..a9210eba65b6 100644 --- a/build/config/mac/package_framework.py +++ b/build/config/mac/package_framework.py @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. 
+# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/mac/prepare_framework_version.py b/build/config/mac/prepare_framework_version.py index db9215069877..0e9daeba989d 100644 --- a/build/config/mac/prepare_framework_version.py +++ b/build/config/mac/prepare_framework_version.py @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/mac/rules.gni b/build/config/mac/rules.gni index fa9eebb192d2..f613a049bdfa 100644 --- a/build/config/mac/rules.gni +++ b/build/config/mac/rules.gni @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -40,18 +40,17 @@ template("mac_info_plist") { apple_info_plist(target_name) { format = "xml1" - extra_substitutions = [] - if (defined(invoker.extra_substitutions)) { - extra_substitutions = invoker.extra_substitutions - } - extra_substitutions += [ - "MAC_SDK_BUILD=$mac_sdk_version", + extra_substitutions = [ + "MAC_SDK_BUILD=$mac_sdk_build_version", "MAC_SDK_NAME=$mac_sdk_name$mac_sdk_version", "MACOSX_DEPLOYMENT_TARGET=$mac_deployment_target", "CHROMIUM_MIN_SYSTEM_VERSION=$mac_min_system_version", "XCODE_BUILD=$xcode_build", "XCODE_VERSION=$xcode_version", ] + if (defined(invoker.extra_substitutions)) { + extra_substitutions += invoker.extra_substitutions + } plist_templates = [ "//build/config/mac/BuildInfo.plist", _info_plist, @@ -303,7 +302,7 @@ template("mac_framework_bundle") { _framework_public_config = _target_name + "_public_config" config(_framework_public_config) { - visibility = [ ":$_framework_target" ] + visibility = [ ":$_framework_target+link" ] framework_dirs = [ root_out_dir ] frameworks = [ _framework_name ] } @@ -438,6 +437,10 @@ template("mac_app_bundle") { _output_extension = "xpc" _product_type = "com.apple.product-type.xpc-service" _write_pkg_info = false + } else if (_package_type == "bundle") { + _output_extension = "bundle" + _product_type = "com.apple.product-type.bundle" + _write_pkg_info = false } else { assert(false, "Unsupported packge_type: " + packge_type) } diff --git a/build/config/mips.gni b/build/config/mips.gni index 6365088b14ac..986ffcb45c49 100644 --- a/build/config/mips.gni +++ b/build/config/mips.gni @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/nacl/BUILD.gn b/build/config/nacl/BUILD.gn index c0c5282b3d15..d60994291e4e 100644 --- a/build/config/nacl/BUILD.gn +++ b/build/config/nacl/BUILD.gn @@ -17,7 +17,7 @@ config("nacl_defines") { defines = [ "__STDC_LIMIT_MACROS=1" ] } - if (current_cpu == "pnacl" && !is_nacl_nonsfi) { + if (current_cpu == "pnacl") { # TODO: Remove the following definition once NACL_BUILD_ARCH and # NACL_BUILD_SUBARCH are defined by the PNaCl toolchain. defines += [ "NACL_BUILD_ARCH=pnacl" ] @@ -43,6 +43,17 @@ config("nacl_warnings") { } } +config("nacl_static_libstdc++") { + # The sysroot of linux x86 bots can have a different version of libstdc++ + # than the one that is on the bots natively. 
Linking dynamically against + # libstdc++ can then lead to linking against symbols that are not found when + # running the executable. + # Therefore, link statically instead. + if (is_linux && current_cpu == "x86") { + ldflags = [ "-static-libstdc++" ] + } +} + # The base target that all targets in the NaCl build should depend on. # This allows configs to be modified for everything in the NaCl build, even when # the NaCl build is composed into the Chrome build. (GN has no functionality to @@ -52,6 +63,7 @@ source_set("nacl_base") { public_configs = [ ":nacl_defines", ":nacl_warnings", + ":nacl_static_libstdc++", ] if (current_os == "nacl") { public_configs += [ ":nexe_defines" ] @@ -73,29 +85,6 @@ config("compiler") { # everywhere for consistency (and possibly quicker builds). cflags += [ "-integrated-as" ] } - if (is_nacl_nonsfi) { - cflags += [ "--pnacl-allow-translate" ] - ldflags += [ - "--pnacl-allow-translate", - "--pnacl-allow-native", - "-Wl,--noirt", - "-Wt,--noirt", - "-Wt,--noirtshim", - - # The clang driver automatically injects -lpthread when using libc++, but - # the toolchain doesn't have it yet. To get around this, use - # -nodefaultlibs and make each executable target depend on - # "//native_client/src/nonsfi/irt:nacl_sys_private". - "-nodefaultlibs", - ] - libs += [ - "c++", - "m", - "c", - "pnaclmm", - ] - include_dirs = [ "//native_client/src/public/linux_syscalls" ] - } asmflags = cflags } diff --git a/build/config/nacl/config.gni b/build/config/nacl/config.gni index 77e15fc51cb3..c8062b41a818 100644 --- a/build/config/nacl/config.gni +++ b/build/config/nacl/config.gni @@ -1,16 +1,19 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. declare_args() { - # Native Client supports both Newlib and Glibc C libraries where Newlib - # is assumed to be the default one; use this to determine whether Glibc - # is being used instead. + # Native Client supports multiple toolchains: + # - nacl_glibc, based on gcc and glibc. + # - pnacl_newlib, based on llvm 3.7 and newlib (default). + # - saigo_newlib, based on llvm 12+ and newlib. + + # True if nacl_glibc is used. is_nacl_glibc = false -} -is_nacl_irt = false -is_nacl_nonsfi = false + # True if saigo_newlib is used. + is_nacl_saigo = false +} nacl_toolchain_dir = "//native_client/toolchain/${host_os}_x86" @@ -46,10 +49,5 @@ nacl_toolchain_tooldir = "${nacl_toolchain_dir}/${nacl_toolchain_package}/${_nacl_tuple}" nacl_toolprefix = "${nacl_toolchain_bindir}/${_nacl_tuple}-" -nacl_irt_toolchain = "//build/toolchain/nacl:irt_" + target_cpu +nacl_irt_toolchain = "//build/toolchain/nacl:irt_" + current_cpu is_nacl_irt = current_toolchain == nacl_irt_toolchain - -# Non-SFI mode is a lightweight sandbox used by Chrome OS for running ARC -# applications. -nacl_nonsfi_toolchain = "//build/toolchain/nacl:newlib_pnacl_nonsfi" -is_nacl_nonsfi = current_toolchain == nacl_nonsfi_toolchain diff --git a/build/config/nacl/host_toolchain.gni b/build/config/nacl/host_toolchain.gni new file mode 100644 index 000000000000..09b93b14a135 --- /dev/null +++ b/build/config/nacl/host_toolchain.gni @@ -0,0 +1,18 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# The compiler for the trusted nacl_helper_bootstrap binary. 
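# NOTE (editor's sketch, not part of the patch): a trace of the selection just
# below. Outside of the CrOS toolchain, a 32-bit ARM Linux build resolves to
# the cross compiler:
#
#   current_cpu == "arm" && !is_android  ->  "arm-linux-gnueabihf-g++"
#
# and any case not matched by a branch keeps the default host "g++".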
+nacl_bootstrap_compiler = "g++" +if (default_toolchain == "//build/toolchain/cros:target") { + import("//build/toolchain/cros_toolchain.gni") + if (target_cpu == "arm64" && current_cpu == "arm") { + nacl_bootstrap_compiler = cros_nacl_helper_arm32_cxx + } else { + nacl_bootstrap_compiler = cros_target_cxx + } +} else if (current_cpu == "arm" && !is_android) { + nacl_bootstrap_compiler = "arm-linux-gnueabihf-g++" +} else if (current_cpu == "mipsel" && !is_android) { + nacl_bootstrap_compiler = "mipsel-linux-gnu-g++" +} diff --git a/build/config/nacl/rules.gni b/build/config/nacl/rules.gni index c08357c40652..a15d32622c02 100644 --- a/build/config/nacl/rules.gni +++ b/build/config/nacl/rules.gni @@ -3,7 +3,6 @@ # found in the LICENSE file. import("//build/config/nacl/config.gni") -import("//build/config/python.gni") # Generate a nmf file # @@ -20,8 +19,7 @@ template("generate_nmf") { assert(defined(invoker.executables), "Must define executables") assert(defined(invoker.nmf), "Must define nmf") - # TODO(crbug.com/1112471): Get this to run cleanly under Python 3. - python2_action(target_name) { + action(target_name) { forward_variables_from(invoker, [ "deps", @@ -130,53 +128,3 @@ template("generate_nmf") { } } } - -# Generate a nmf file for Non-SFI tests -# -# Non-SFI tests use a different manifest format from regular Native Client and -# as such requires a different generator. -# -# Variables: -# executable: Non-SFI .nexe executable to generate nmf for -# nmf: the name and the path of the output file -# nmfflags: additional flags for the nmf generator -template("generate_nonsfi_test_nmf") { - assert(defined(invoker.executable), "Must define executable") - assert(defined(invoker.nmf), "Must define nmf") - - action(target_name) { - forward_variables_from(invoker, - [ - "deps", - "data_deps", - "executable", - "nmf", - "testonly", - "public_deps", - "visibility", - ]) - - script = "//ppapi/tests/create_nonsfi_test_nmf.py" - sources = [ executable ] - outputs = [ nmf ] - - # NOTE: We use target_cpu rather than current_cpu on purpose because - # current_cpu is always going to be pnacl for Non-SFI, but the Non-SFI - # .nexe executable is always translated to run on the target machine. - if (target_cpu == "x86") { - arch = "x86-32" - } else if (target_cpu == "x64") { - arch = "x86-64" - } else { - arch = target_cpu - } - args = [ - "--program=" + rebase_path(executable, root_build_dir), - "--arch=${arch}", - "--output=" + rebase_path(nmf, root_build_dir), - ] - if (defined(invoker.nmfflags)) { - args += invoker.nmfflags - } - } -} diff --git a/build/config/ozone.gni b/build/config/ozone.gni index a14eb93cec47..00eac5ee5bd3 100644 --- a/build/config/ozone.gni +++ b/build/config/ozone.gni @@ -1,12 +1,17 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/chromecast_build.gni") import("//build/config/chromeos/ui_mode.gni") -import("//build/config/ui.gni") import("//build/toolchain/toolchain.gni") +declare_args() { + # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux + # that does not require X11. + use_ozone = is_chromeos || is_fuchsia || is_linux && !is_starboard +} + declare_args() { # Ozone extra platforms file path. Can be overridden to build out of # tree ozone platforms. @@ -46,15 +51,15 @@ declare_args() { # Compile the 'scenic' platform. 
   ozone_platform_scenic = false
 
+  # Compile the 'flatland' platform.
+  ozone_platform_flatland = false
+
   # Compile the 'x11' platform.
   ozone_platform_x11 = false
 
   # Compile the 'wayland' platform.
   ozone_platform_wayland = false
 
-  # Compile the 'windows' platform.
-  ozone_platform_windows = false
-
   if (ozone_auto_platforms) {
     # Use headless as the default platform unless modified below.
     ozone_platform = "headless"
@@ -62,15 +67,15 @@ declare_args() {
 
     if (is_cast_audio_only) {
       # Just use headless for audio-only Cast platforms.
-    } else if (is_chromecast && !is_fuchsia) {
-      # Enable the Cast ozone platform on all A/V Cast builds except Fuchsia.
+    } else if (is_castos) {
+      # Enable the Cast ozone platform on all video CastOS builds.
       ozone_platform_cast = true
 
       # For visual desktop Chromecast builds, override the default "headless"
       # platform with --ozone-platform=x11.
-      # TODO(halliwell): Create a libcast_graphics implementation for desktop
-      # using X11, and disable this platform.
-      if (is_cast_desktop_build && !is_cast_audio_only) {
+      # NOTE: The CQ is one such case.
+      if (target_os == "linux" &&
+          (target_cpu == "x86" || target_cpu == "x64")) {
         ozone_platform_x11 = true
       } else {
         ozone_platform = "cast"
@@ -79,16 +84,17 @@ declare_args() {
       ozone_platform = "x11"
       ozone_platform_drm = true
       ozone_platform_x11 = true
-    } else if (is_linux || is_chromeos_lacros) {
+    } else if (is_chromeos_lacros) {
+      ozone_platform = "wayland"
+      ozone_platform_wayland = true
+    } else if (is_linux) {
       ozone_platform = "x11"
       ozone_platform_wayland = true
       ozone_platform_x11 = true
-    } else if (is_win) {
-      ozone_platform = "windows"
-      ozone_platform_windows = true
     } else if (is_fuchsia) {
-      ozone_platform = "scenic"
+      ozone_platform = "flatland"
       ozone_platform_scenic = true
+      ozone_platform_flatland = true
     }
   }
 
@@ -108,9 +114,9 @@ ozone_external_platform_visibility = [ "$_ozone_extra_directory/*" ]
 if (is_a_target_toolchain) {
   assert(use_ozone || !(ozone_platform_cast || ozone_platform_drm ||
+                            ozone_platform_flatland ||
                             ozone_platform_headless || ozone_platform_x11 ||
-                            ozone_platform_wayland || ozone_platform_windows ||
-                            ozone_platform_scenic),
+                            ozone_platform_wayland || ozone_platform_scenic),
          "Must set use_ozone to select ozone platforms")
 }
diff --git a/build/config/ozone_extra.gni b/build/config/ozone_extra.gni
index 57fa791354c4..cbbca1efe223 100644
--- a/build/config/ozone_extra.gni
+++ b/build/config/ozone_extra.gni
@@ -1,4 +1,4 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -18,7 +18,16 @@ ozone_external_platform_deps = []
 # ozone_external_platform_test_deps = [ "platform/foo1:foo1_unitests", ... ]
 ozone_external_platform_test_deps = []
 
+# If a platform has integration tests, the corresponding source_set can be
+# listed here so that they get included into ozone_integration_tests.
+ozone_external_platform_integration_test_deps = []
+
 # If a platform has test support files for ui, the corresponding source_set can
 # be listed here so that they get included into ui_test_support.
 # ozone_external_platform_ui_test_support_deps = [ "platform/foo1:ui_test_support", ... ]
 ozone_external_platform_ui_test_support_deps = []
+
+# If a platform has test support for interactive_ui_tests, the corresponding
+# source_set can be listed here so that it can be included into
+# interactive_ui_tests.
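# NOTE (editor's sketch, not part of the patch): an out-of-tree platform
# overlay would append its own label to the list declared just below, e.g.
#
#   ozone_external_interactive_ui_tests_deps =
#       [ "platform/foo1:interactive_ui_test_support" ]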
+ozone_external_interactive_ui_tests_deps = [] diff --git a/build/config/pch.gni b/build/config/pch.gni index 431f70ffe88e..9cb42033189f 100644 --- a/build/config/pch.gni +++ b/build/config/pch.gni @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -10,5 +10,6 @@ declare_args() { # but for distributed build system uses (like goma or rbe) or when # doing official builds. # On Linux it slows down the build, so don't enable it by default. - enable_precompiled_headers = !is_official_build && !(use_goma || use_rbe) && !is_linux && !is_starboard + enable_precompiled_headers = + !is_official_build && !(use_goma || use_remoteexec) && !is_linux && !is_starboard } diff --git a/build/config/posix/BUILD.gn b/build/config/posix/BUILD.gn index e61554c56564..8312d7ecf62c 100644 --- a/build/config/posix/BUILD.gn +++ b/build/config/posix/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -25,7 +25,7 @@ config("runtime_library") { defines = [] ldflags = [] - if (!is_apple && sysroot != "") { + if (!is_apple && sysroot != "" && current_os != "zos") { # Pass the sysroot to all C compiler variants, the assembler, and linker. sysroot_flags = [ "--sysroot=" + rebase_path(sysroot, root_build_dir) ] if (is_linux || is_chromeos) { @@ -33,12 +33,12 @@ config("runtime_library") { # when turning the sysroot on or off. (defines are passed via the command # line, and build system rebuilds things when their commandline # changes). Nothing should ever read this define. - sysroot_hash = + sysroot_key = exec_script("//build/linux/sysroot_scripts/install-sysroot.py", - [ "--print-hash=$current_cpu" ], + [ "--print-key=$current_cpu" ], "trim string", [ "//build/linux/sysroot_scripts/sysroots.json" ]) - defines += [ "CR_SYSROOT_HASH=$sysroot_hash" ] + defines += [ "CR_SYSROOT_KEY=$sysroot_key" ] } asmflags += sysroot_flags ldflags += sysroot_flags @@ -55,18 +55,5 @@ config("runtime_library") { cflags_cc += sysroot_flags cflags_objc += sysroot_flags cflags_objcc += sysroot_flags - - # Need to get some linker flags out of the sysroot. - ld_paths = exec_script("sysroot_ld_path.py", - [ - rebase_path("//build/linux/sysroot_ld_path.sh", - root_build_dir), - rebase_path(sysroot), - ], - "list lines") - foreach(ld_path, ld_paths) { - ld_path = rebase_path(ld_path, root_build_dir) - ldflags += [ "-L" + ld_path ] - } } } diff --git a/build/config/posix/sysroot_ld_path.py b/build/config/posix/sysroot_ld_path.py deleted file mode 100644 index b45aa0de4979..000000000000 --- a/build/config/posix/sysroot_ld_path.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This file takes two arguments, the relative location of the shell script that -# does the checking, and the name of the sysroot. - -# TODO(brettw) the build/linux/sysroot_ld_path.sh script should be rewritten in -# Python in this file. 
- -from __future__ import print_function - -import subprocess -import sys - -if len(sys.argv) != 3: - print("Need two arguments") - sys.exit(1) - -result = subprocess.check_output([sys.argv[1], - sys.argv[2]]).strip().decode("utf-8") -result = result.replace(" ", "\n") -if result != "": - print(result) diff --git a/build/config/profiling/OWNERS b/build/config/profiling/OWNERS index 225ce184ca74..ea1bcbbb28cc 100644 --- a/build/config/profiling/OWNERS +++ b/build/config/profiling/OWNERS @@ -1,3 +1 @@ -liaoyuke@chromium.org -sajjadm@chromium.org -sebmarchand@chromium.org +pasthana@google.com \ No newline at end of file diff --git a/build/config/profiling/profiling.gni b/build/config/profiling/profiling.gni index 9ca3d4aa1bd5..d30ef7456990 100644 --- a/build/config/profiling/profiling.gni +++ b/build/config/profiling/profiling.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/python.gni b/build/config/python.gni index 89fa3179939a..f3431fcc0dc5 100644 --- a/build/config/python.gni +++ b/build/config/python.gni @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -63,96 +63,6 @@ template("python_library") { } } -_is_python2 = exec_script("//build/util/is_python2.py", [], "json") - -# This is a wrapper around action() that ensures that the script is -# run under a Python2 executable, even if the main script_executable is -# Python3. -# -# It supports all of action()'s arguments. -# -# TODO(crbug.com/1112471): Remove this once everything runs cleanly under -# Python3. -template("python2_action") { - action(target_name) { - # Forward all variables. Ensure that testonly and visibility are forwarded - # explicitly, since this performs recursive scope lookups, which is - # required to ensure their definition from scopes above the caller are - # properly handled. All other variables are forwarded with "*", which - # doesn't perform recursive lookups at all. See https://crbug.com/862232 - forward_variables_from(invoker, - [ - "testonly", - "visibility", - ]) - forward_variables_from(invoker, - "*", - [ - "testonly", - "visibility", - ]) - - if (!_is_python2) { - script = "//build/util/python2_action.py" - _rebased_script = rebase_path(invoker.script, root_build_dir) - inputs = [] - inputs = [ invoker.script ] - if (defined(invoker.inputs)) { - inputs += invoker.inputs - } - args = [] - args = [ _rebased_script ] - if (defined(invoker.args)) { - args += invoker.args - } - } - } -} - -# This is a wrapper around action_foreach() that ensures that the script is -# run under a Python2 executable, even if the main script_executable is -# Python3. -# -# It supports all of action_foreach()'s arguments. -# -# TODO(crbug.com/1112471): Remove this once everything runs cleanly under -# Python3. -template("python2_action_foreach") { - action_foreach(target_name) { - # Forward all variables. Ensure that testonly and visibility are forwarded - # explicitly, since this performs recursive scope lookups, which is - # required to ensure their definition from scopes above the caller are - # properly handled. All other variables are forwarded with "*", which - # doesn't perform recursive lookups at all. 
See https://crbug.com/862232 - forward_variables_from(invoker, - [ - "testonly", - "visibility", - ]) - forward_variables_from(invoker, - "*", - [ - "testonly", - "visibility", - ]) - - if (!_is_python2) { - script = "//build/util/python2_action.py" - _rebased_script = rebase_path(invoker.script, root_build_dir) - inputs = [] - inputs = [ invoker.script ] - if (defined(invoker.inputs)) { - inputs += invoker.inputs - } - args = [] - args = [ _rebased_script ] - if (defined(invoker.args)) { - args += invoker.args - } - } - } -} - # A template used for actions that execute a Python script, which has an # associated .pydeps file. In other words: # @@ -172,7 +82,6 @@ template("python2_action_foreach") { # } template("action_with_pydeps") { action(target_name) { - # Forward all variables except run_under_python2. # Ensure that testonly and visibility are forwarded # explicitly, since this performs recursive scope lookups, which is # required to ensure their definition from scopes above the caller are @@ -186,7 +95,6 @@ template("action_with_pydeps") { forward_variables_from(invoker, "*", [ - "run_under_python2", "testonly", "visibility", ]) @@ -208,21 +116,11 @@ template("action_with_pydeps") { # expects paths that are relative to the current BUILD.gn _script_dir = get_path_info(_pydeps_file, "dir") inputs += rebase_path(_pydeps_entries, ".", _script_dir) - - if (defined(invoker.run_under_python2) && invoker.run_under_python2 && - !_is_python2) { - inputs += [ invoker.script ] - _args = args - args = [] - args = [ rebase_path(invoker.script, root_build_dir) ] + _args - script = "//build/util/python2_action.py" - } } } template("action_foreach_with_pydeps") { action_foreach(target_name) { - # Forward all variables execept run_under_python2. # Ensure that testonly and visibility are forwarded # explicitly, since this performs recursive scope lookups, which is # required to ensure their definition from scopes above the caller are @@ -236,7 +134,6 @@ template("action_foreach_with_pydeps") { forward_variables_from(invoker, "*", [ - "run_under_python2", "testonly", "visibility", ]) @@ -260,14 +157,5 @@ template("action_foreach_with_pydeps") { # expects paths that are relative to the current BUILD.gn _script_dir = get_path_info(script, "dir") inputs += rebase_path(_pydeps_entries, ".", _script_dir) - - if (defined(invoker.run_under_python2) && invoker.run_under_python2 && - !_is_python2) { - inputs += [ invoker.script ] - _args = args - args = [] - args = [ rebase_path(invoker.script, root_build_dir) ] + _args - script = "//build/util/python2_action.py" - } } } diff --git a/build/config/riscv.gni b/build/config/riscv.gni new file mode 100644 index 000000000000..b9597a0a9d66 --- /dev/null +++ b/build/config/riscv.gni @@ -0,0 +1,19 @@ +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/v8_target_cpu.gni") + +if (current_cpu == "riscv64" || v8_current_cpu == "riscv64") { + declare_args() { + # RISCV Vector extension compilation flag. + riscv_use_rvv = false + + # RISCV Vector extension VLEN. Possible values are: + # 128 + # 256 + # 512 + # 1024 + riscv_rvv_vlen = 128 + } +} diff --git a/build/config/rust.gni b/build/config/rust.gni new file mode 100644 index 000000000000..b05e37015287 --- /dev/null +++ b/build/config/rust.gni @@ -0,0 +1,310 @@ +# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chrome_build.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/toolchain/toolchain.gni") + +if (is_android) { + import("//build/config/android/config.gni") +} + +declare_args() { + # Whether to allow Rust code to be part of the Chromium *build process*. + # This can be used to create Rust test binaries, even if the flag below + # is false. + # This only applies to Chromium itself, so the build_with_chromium check + # should not be removed. + # TODO(crbug.com/1386212): Mac + # TODO(crbug.com/1271215): Windows + # TODO(crbug.com/1426472): use_clang_coverage + # TODO(crbug.com/1427362): using_sanitizer + # TODO(crbug.com/1427364): target_cpu != "x86" + # There is no specific bug for !is_official_build or other platforms, since + # this is just a matter of rolling things out slowly and carefully and there + # may be no actual bugs there. + enable_rust = (is_linux || is_android) && !is_official_build && + !using_sanitizer && target_cpu != "x86" && + !use_clang_coverage && is_clang && build_with_chromium + + # As we incrementally enable Rust on mainstream builders, we want to enable + # the toolchain (by switching 'enable_rust' to true) while still disabling + # almost all Rust features. Yet we still want to have some builders with + # all Rust features enabled. + enable_all_rust_features = false + + # Use the Rust toolchain built in-tree. See //tools/rust. + use_chromium_rust_toolchain = true + + # Build libstd locally with GN and use that instead of the prebuilts, where + # applicable. If this is false the prebuilt libstd will always be used. If + # true, the local build is only used with the Chromium Rust toolchain and only + # on supported platforms and GN targets. + enable_local_libstd = true + + # Chromium currently has a Rust toolchain for Android and Linux, but + # if you wish to experiment on more platforms you can use this + # argument to specify an alternative toolchain. + # This should be an absolute path to a directory + # containing a 'bin' directory and others. Commonly + # /.rustup/toolchains/nightly-- + rust_sysroot_absolute = "" + + # If you're using an external Rust toolchain, set this to a Rust + # version string: the output of rustc -V. + rustc_version = "" + + # If you're using a Rust toolchain as specified by rust_sysroot_absolute, + # you can specify whether it supports nacl here. + rust_toolchain_supports_nacl = false + + # Any extra std rlibs in your Rust toolchain, relative to the standard + # Rust toolchain. Typically used with 'rust_sysroot_absolute' + added_rust_stdlib_libs = [] + + # Any removed std rlibs in your Rust toolchain, relative to the standard + # Rust toolchain. Typically used with 'rust_sysroot_absolute' + removed_rust_stdlib_libs = [] + + # Non-rlib libs provided in the toolchain sysroot. Usually this is empty, but + # e.g. the Android Rust Toolchain provides a libunwind.a that rustc expects. + extra_sysroot_libs = [] + + # Use goma for Rust builds. Experimental. The only known problem is + # b/193072381, but then again, we don't expect a build speedup before much + # more work is done. + use_goma_rust = false + + # The host toolchain to use when you don't want sanitizers enabled. By default + # it is the regular toolchain, but when that toolchain has sanitizers, then + # this variable is changed to avoid them.
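+  # An illustrative sketch (the target names are invented, not part of the
+  # real build): a standalone host tool could opt out of sanitizers with
+  #   group("my_tool_host") {
+  #     deps = [ ":my_tool($host_toolchain_no_sanitizers)" ]
+  #   }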
+ host_toolchain_no_sanitizers = host_toolchain +} + +declare_args() { + # Use a separate declare_args so these variables' defaults can depend on the + # ones above. + + # When true, uses the locally-built std in all Rust targets. + # + # As an internal implementation detail this can be overridden on specific + # targets (e.g. to run build.rs scripts while building std), but this + # generally should not be done. + use_local_std_by_default = enable_local_libstd && use_chromium_rust_toolchain + + # Individual Rust components. + + # Conversions between Rust types and C++ types. + enable_rust_base_conversions = enable_all_rust_features + + # The base::JSONReader implementation. Requires base conversions. + enable_rust_json = enable_all_rust_features + + # Support for chrome://crash-rust to check crash dump collection works. + enable_rust_crash = enable_all_rust_features + + # Support for Rust mojo bindings. + enable_rust_mojo = enable_all_rust_features + + # Support for the 'gnrt' Rust tool. + enable_rust_gnrt = enable_all_rust_features + + # Rust gtest interop + enable_rust_gtest_interop = enable_all_rust_features + + # Enable Boringssl Rust bindings generation + enable_rust_boringssl = enable_all_rust_features +} + +# Platform support for "official" toolchains (Android or Chromium) +android_toolchain_supports_platform = + (!is_nacl && + (is_android && (current_cpu == "arm" || current_cpu == "arm64" || + current_cpu == "x64" || current_cpu == "x86"))) || + (is_linux && current_cpu == "x64") +chromium_toolchain_supports_platform = !is_nacl +custom_toolchain_supports_platform = !is_nacl || rust_toolchain_supports_nacl + +toolchain_has_rust = + enable_rust && + ((use_chromium_rust_toolchain && chromium_toolchain_supports_platform) || + (!use_chromium_rust_toolchain && android_toolchain_supports_platform) || + (rust_sysroot_absolute != "" && custom_toolchain_supports_platform)) + +# The rustc_revision is used to introduce a dependency on the toolchain version +# (so e.g. rust targets are rebuilt, and the standard library is re-copied when +# the toolchain changes). It is left empty for custom toolchains. +rustc_revision = "" +if (toolchain_has_rust) { + if (use_chromium_rust_toolchain) { + update_rust_args = [ "--print-package-version" ] + rustc_revision = exec_script("//tools/rust/update_rust.py", + update_rust_args, + "trim string") + } else if (rust_sysroot_absolute != "") { + rustc_revision = rustc_version + } else { + # Android toolchain version. + rustc_revision = "rustc 1.64.0-dev (Android Rust Toolchain version 9099361)" + } +} + +# TODO(crbug.com/1278030): To build unit tests for Android we need to build +# them as a dylib and put them into an APK. We should reuse all the same logic +# for gtests from the `//testing/test:test` template. +can_build_rust_unit_tests = toolchain_has_rust && !is_android + +# Whether to build chrome://crash/rust support. +build_rust_crash = toolchain_has_rust && enable_rust_crash + +# We want to store rust_sysroot as a source-relative variable for ninja +# portability. In practice if an external toolchain was specified, it might +# be an absolute path, but we'll do our best. 
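+# For example, a hypothetical args.gn selecting an external nightly toolchain
+# (the path and version string below are illustrative, not defaults):
+#   enable_rust = true
+#   rust_sysroot_absolute = "/home/me/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu"
+#   rustc_version = "rustc 1.64.0-nightly (abc1234 2022-09-01)"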
+if (enable_rust) { + if (rust_sysroot_absolute != "") { + rust_sysroot = get_path_info(rust_sysroot_absolute, "abspath") + use_unverified_rust_toolchain = true + } else if (use_chromium_rust_toolchain) { + rust_sysroot = "//third_party/rust-toolchain" + use_unverified_rust_toolchain = false + } else { + if (host_os != "linux") { + assert(false, + "Attempt to use Android Rust toolchain on an unsupported platform") + } + + rust_sysroot = "//third_party/android_rust_toolchain/toolchain" + use_unverified_rust_toolchain = false + extra_sysroot_libs += [ "libunwind.a" ] + } +} + +# Figure out the Rust target triple (aka 'rust_abi_target') +# +# This is here rather than in the toolchain files because it's used also by +# //build/rust/std to find the Rust standard library and construct a sysroot for +# rustc invocations. +# +# The list of architectures supported by Rust is here: +# https://doc.rust-lang.org/nightly/rustc/platform-support.html. We map Chromium +# targets to Rust targets comprehensively despite not having official support +# (see '*_toolchain_supports_platform above') to enable experimentation with +# other toolchains. +rust_abi_target = "" +if (is_linux || is_chromeos) { + cpu = current_cpu + if (cpu == "arm64") { + cpu = "aarch64" + } else if (cpu == "x64") { + cpu = "x86_64" + } + rust_abi_target = cpu + "-unknown-linux-gnu" +} else if (is_android) { + import("//build/config/android/abi.gni") + rust_abi_target = android_abi_target + if (rust_abi_target == "arm-linux-androideabi") { + # Android clang target specifications mostly match Rust, but this + # is an exception + rust_abi_target = "armv7-linux-androideabi" + } +} else if (is_fuchsia) { + if (current_cpu == "arm64") { + rust_abi_target = "aarch64-fuchsia" + } else if (current_cpu == "x64") { + rust_abi_target = "x86_64-fuchsia" + } else { + assert(false, "Architecture not supported") + } +} else if (is_ios) { + if (current_cpu == "arm64") { + rust_abi_target = "aarch64-apple-ios" + } else if (current_cpu == "arm") { + # There's also an armv7s-apple-ios, which targets a more recent ARMv7 + # generation CPU found in later iPhones. We'll go with the older one for + # maximal compatibility. As we come to support all the different platforms + # with Rust, we might want to be more precise here. + rust_abi_target = "armv7-apple-ios" + } else if (current_cpu == "x64") { + rust_abi_target = "x86_64-apple-ios" + } else if (current_cpu == "x86") { + rust_abi_target = "i386-apple-ios" + } else { + assert(false, "Architecture not supported") + } +} else if (is_mac) { + if (current_cpu == "arm64") { + rust_abi_target = "aarch64-apple-darwin" + } else if (current_cpu == "x64") { + rust_abi_target = "x86_64-apple-darwin" + } else { + assert(false, "Architecture not supported") + } +} else if (is_win) { + if (current_cpu == "arm64") { + rust_abi_target = "aarch64-pc-windows-msvc" + } else if (current_cpu == "x86" || current_cpu == "x64") { + rust_abi_target = "x86_64-pc-windows-msvc" + } else { + assert(false, "Architecture not supported") + } +} + +assert(!toolchain_has_rust || rust_abi_target != "") + +# This variable is passed to the Rust libstd build. 
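+# (Illustrative: a Linux x64 target build resolves rust_abi_target to
+# "x86_64-unknown-linux-gnu" above and rust_target_arch to "x86_64" below.)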
+rust_target_arch = "" +if (current_cpu == "x86") { + rust_target_arch = "x86" +} else if (current_cpu == "x64") { + rust_target_arch = "x86_64" +} else if (current_cpu == "arm") { + rust_target_arch = "arm" +} else if (current_cpu == "arm64") { + rust_target_arch = "aarch64" +} else if (current_cpu == "mipsel") { + rust_target_arch = "mips" +} else if (current_cpu == "mips64el") { + rust_target_arch = "mips64" +} else if (current_cpu == "s390x") { + rust_target_arch = "s390x" +} else if (current_cpu == "ppc64") { + rust_target_arch = "powerpc64" +} else if (current_cpu == "riscv64") { + rust_target_arch = "riscv64" +} + +assert(!toolchain_has_rust || rust_target_arch != "") + +# Must use Chromium Rust toolchain to get precisely matching LLVM versions +# in order to enable LTO. Some say that LTO probably works if LLVM is "close +# enough", but we don't want to take that risk. +assert(!use_thin_lto || !enable_rust || use_chromium_rust_toolchain || + use_unverified_rust_toolchain, + "Must use Chromium Rust toolchain for LTO") + +# Determine whether the local libstd can and should be built. +local_libstd_supported = enable_local_libstd && use_chromium_rust_toolchain + +# Determine whether the prebuilt libstd can be used +prebuilt_libstd_supported = !use_chromium_rust_toolchain || + (target_os == "linux" && target_cpu == "x64") + +# Arguments for Rust invocation. +# This is common between gcc/clang, Mac and Windows toolchains so specify once, +# here. This is not the complete command-line: toolchains should add -o +# and probably --emit arguments too. +rustc_common_args = "--crate-name {{crate_name}} {{source}} --crate-type {{crate_type}} {{rustflags}}" + +# Rust procedural macros are shared objects loaded into a prebuilt host rustc +# binary. To build them, we obviously need to build for the host. Not only that, +# but because the host rustc is prebuilt, it lacks the machinery to be able to +# load shared objects built using sanitizers (ASAN etc.) For that reason, we need +# to use a host toolchain that lacks sanitizers. This is only strictly necessary +# for procedural macros, but we may also choose to build standalone Rust host +# executable tools using the same toolchain, as they're likely to depend upon +# similar dependencies (syn, quote etc.) and it saves a little build time. +if (using_sanitizer || toolchain_disables_sanitizers) { + host_toolchain_no_sanitizers = "${host_toolchain}_no_sanitizers" +} diff --git a/build/config/sanitizers/BUILD.gn b/build/config/sanitizers/BUILD.gn index 95c0a5ffaa2f..67ca2ed253f5 100644 --- a/build/config/sanitizers/BUILD.gn +++ b/build/config/sanitizers/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -18,46 +18,54 @@ if (is_ios) { group("deps") { if (using_sanitizer) { public_configs = [ - ":sanitizer_options_link_helper", - # Even when a target removes default_sanitizer_flags, it may be depending # on a library that did not remove default_sanitizer_flags. Thus, we need # to add the ldflags here as well as in default_sanitizer_flags. 
":default_sanitizer_ldflags", ] - deps = [ ":options_sources" ] - if (is_win) { - exe = ".exe" - } else { - exe = "" + deps = [] + if (!is_fuchsia) { + if (is_win) { + exe = ".exe" + } else { + exe = "" + } + data = [ + "//tools/valgrind/asan/", + "$clang_base_path/bin/llvm-symbolizer${exe}", + ] + } + if (is_asan || is_lsan || is_msan || is_tsan || is_ubsan || is_ubsan_vptr || + is_ubsan_security) { + public_configs += [ ":sanitizer_options_link_helper" ] + deps += [ ":options_sources" ] } - data = [ - "//tools/valgrind/asan/", - "$clang_base_path/bin/llvm-symbolizer${exe}", - ] if (use_prebuilt_instrumented_libraries || use_locally_built_instrumented_libraries) { deps += [ "//third_party/instrumented_libraries:deps" ] } } if (is_asan) { - # ASAN is supported on iOS but the runtime library depends on the compiler - # used (Chromium version of clang versus Xcode version of clang). Only copy - # the ASAN runtime on iOS if building with Chromium clang. - if (is_win || is_mac || (is_ios && !use_xcode_clang)) { + if (is_win || is_apple) { data_deps = [ ":copy_asan_runtime" ] } - if (is_mac || (is_ios && !use_xcode_clang)) { + if (is_apple) { public_deps = [ ":asan_runtime_bundle_data" ] } } + if (use_centipede) { + # For executables which aren't actual fuzzers, we need stubs for + # the sanitizer coverage symbols, because we'll still be generating + # .o files which depend on them. + deps += [ "//third_party/centipede:centipede_weak_sancov_stubs" ] + } } assert(!(is_win && is_asan && current_cpu == "x86"), "ASan is only supported in 64-bit builds on Windows.") -if ((is_mac || is_win || (is_ios && !use_xcode_clang)) && is_asan) { - if (is_mac) { +if ((is_apple || is_win) && is_asan) { + if (is_mac || (is_ios && target_environment == "catalyst")) { _clang_rt_dso_path = "darwin/libclang_rt.asan_osx_dynamic.dylib" } else if (is_ios) { _clang_rt_dso_path = "darwin/libclang_rt.asan_iossim_dynamic.dylib" @@ -100,7 +108,7 @@ if ((is_mac || is_win || (is_ios && !use_xcode_clang)) && is_asan) { config("sanitizer_options_link_helper") { if (is_apple) { - ldflags = [ "-Wl,-U,_sanitizer_options_link_helper" ] + ldflags = [ "-Wl,-u,__sanitizer_options_link_helper" ] } else if (!is_win) { ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ] } @@ -111,7 +119,7 @@ static_library("options_sources") { # unconditionally linked into targets. visibility = [ ":deps", - "//:gn_visibility", + "//:gn_all", ] sources = [ "//build/sanitizers/sanitizer_options.cc" ] @@ -156,31 +164,37 @@ config("default_sanitizer_ldflags") { ] if (is_posix || is_fuchsia) { + sanitizers = [] # sanitizers applicable to both clang and rustc ldflags = [] + rustflags = [] if (is_asan) { - ldflags += [ "-fsanitize=address" ] + sanitizers += [ "address" ] } if (is_hwasan) { - ldflags += [ "-fsanitize=hwaddress" ] + sanitizers += [ "hwaddress" ] } if (is_lsan) { + # In Chromium, is_lsan always implies is_asan. ASAN includes LSAN. + # It seems harmless to pass both options to clang, but it doesn't + # work on rustc, so apply this option to clang only. 
ldflags += [ "-fsanitize=leak" ] } if (is_tsan) { - ldflags += [ "-fsanitize=thread" ] + sanitizers += [ "thread" ] } if (is_msan) { - ldflags += [ "-fsanitize=memory" ] + sanitizers += [ "memory" ] } if (is_ubsan || is_ubsan_security) { ldflags += [ "-fsanitize=undefined" ] } - if (is_ubsan_null) { - ldflags += [ "-fsanitize=null" ] - } if (is_ubsan_vptr) { ldflags += [ "-fsanitize=vptr" ] } + foreach(sanitizer, sanitizers) { + ldflags += [ "-fsanitize=$sanitizer" ] + rustflags += [ "-Zsanitizer=$sanitizer" ] + } if (use_sanitizer_coverage) { if (use_libfuzzer) { @@ -221,19 +235,11 @@ config("default_sanitizer_ldflags") { # In the static-library build, ASan libraries are different for # executables and dlls, see link_executable and link_shared_library below. # This here handles only the component build. - if (current_cpu == "x64") { - # Windows 64-bit. - libs = [ - "clang_rt.asan_dynamic-x86_64.lib", - "clang_rt.asan_dynamic_runtime_thunk-x86_64.lib", - ] - } else { - assert(current_cpu == "x86", "WinASan unsupported architecture") - libs = [ - "clang_rt.asan_dynamic-i386.lib", - "clang_rt.asan_dynamic_runtime_thunk-i386.lib", - ] - } + assert(current_cpu == "x64", "WinASan unsupported architecture") + libs = [ + "clang_rt.asan_dynamic-x86_64.lib", + "clang_rt.asan_dynamic_runtime_thunk-x86_64.lib", + ] } if (use_libfuzzer) { assert(current_cpu == "x64", "LibFuzzer unsupported architecture") @@ -274,47 +280,39 @@ config("asan_flags") { if (is_asan) { cflags += [ "-fsanitize=address" ] if (is_win) { - if (!defined(asan_win_blacklist_path)) { - asan_win_blacklist_path = + if (!defined(asan_win_blocklist_path)) { + asan_win_blocklist_path = rebase_path("//tools/memory/asan/blocklist_win.txt", root_build_dir) } - cflags += [ "-fsanitize-blacklist=$asan_win_blacklist_path" ] + cflags += [ "-fsanitize-ignorelist=$asan_win_blocklist_path" ] } } } config("link_executable") { if (is_asan && is_win && !is_component_build) { - if (current_cpu == "x64") { - ldflags = [ "-wholearchive:clang_rt.asan-x86_64.lib" ] - } else { - assert(current_cpu == "x86", "WinASan unsupported architecture") - ldflags = [ "-wholearchive:clang_rt.asan-i386.lib" ] - } + assert(current_cpu == "x64", "WinASan unsupported architecture") + ldflags = [ "-wholearchive:clang_rt.asan-x86_64.lib" ] } } config("link_shared_library") { if (is_asan && is_win && !is_component_build) { - if (current_cpu == "x64") { - libs = [ "clang_rt.asan_dll_thunk-x86_64.lib" ] - } else { - assert(current_cpu == "x86", "WinASan unsupported architecture") - libs = [ "clang_rt.asan_dll_thunk-i386.lib" ] - } + assert(current_cpu == "x64", "WinASan unsupported architecture") + libs = [ "clang_rt.asan_dll_thunk-x86_64.lib" ] } } config("cfi_flags") { cflags = [] if (is_cfi && current_toolchain == default_toolchain) { - if (!defined(cfi_blacklist_path)) { - cfi_blacklist_path = + if (!defined(cfi_ignorelist_path)) { + cfi_ignorelist_path = rebase_path("//tools/cfi/ignores.txt", root_build_dir) } cflags += [ "-fsanitize=cfi-vcall", - "-fsanitize-blacklist=$cfi_blacklist_path", + "-fsanitize-ignorelist=$cfi_ignorelist_path", ] if (use_cfi_cast) { @@ -411,14 +409,20 @@ config("msan_flags") { if (is_msan) { assert(is_linux || is_chromeos, "msan only supported on linux x86_64/ChromeOS") - if (!defined(msan_blacklist_path)) { - msan_blacklist_path = - rebase_path("//tools/msan/blacklist.txt", root_build_dir) + if (!defined(msan_ignorelist_path)) { + msan_ignorelist_path = + rebase_path("//tools/msan/ignorelist.txt", root_build_dir) } cflags = [ 
"-fsanitize=memory", "-fsanitize-memory-track-origins=$msan_track_origins", - "-fsanitize-blacklist=$msan_blacklist_path", + "-fsanitize-ignorelist=$msan_ignorelist_path", + + # TODO(https://crbug.com/1317909): evaluate and possibly enable + "-fno-sanitize-memory-use-after-dtor", + + # TODO(https://crbug.com/1369167): evaluate and possibly enable + "-fno-sanitize-memory-param-retval", ] } } @@ -426,13 +430,13 @@ config("msan_flags") { config("tsan_flags") { if (is_tsan) { assert(is_linux || is_chromeos, "tsan only supported on linux x86_64") - if (!defined(tsan_blacklist_path)) { - tsan_blacklist_path = + if (!defined(tsan_ignorelist_path)) { + tsan_ignorelist_path = rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir) } cflags = [ "-fsanitize=thread", - "-fsanitize-blacklist=$tsan_blacklist_path", + "-fsanitize-ignorelist=$tsan_ignorelist_path", ] } } @@ -440,17 +444,19 @@ config("tsan_flags") { config("ubsan_flags") { cflags = [] if (is_ubsan) { - if (!defined(ubsan_blacklist_path)) { - ubsan_blacklist_path = + if (!defined(ubsan_ignorelist_path)) { + ubsan_ignorelist_path = rebase_path("//tools/ubsan/ignorelist.txt", root_build_dir) } cflags += [ + "-fsanitize=alignment", "-fsanitize=bool", "-fsanitize=bounds", "-fsanitize=builtin", "-fsanitize=float-divide-by-zero", "-fsanitize=integer-divide-by-zero", "-fsanitize=null", + "-fsanitize=nonnull-attribute", "-fsanitize=object-size", "-fsanitize=return", "-fsanitize=returns-nonnull-attribute", @@ -458,12 +464,12 @@ config("ubsan_flags") { "-fsanitize=signed-integer-overflow", "-fsanitize=unreachable", "-fsanitize=vla-bound", - "-fsanitize-blacklist=$ubsan_blacklist_path", + "-fsanitize-ignorelist=$ubsan_ignorelist_path", ] # Chromecast ubsan builds fail to compile with these # experimental flags, so only add them to non-chromecast ubsan builds. - if (!is_chromecast) { + if (!is_castos && !is_cast_android) { cflags += [ # Employ the experimental PBQP register allocator to avoid slow # compilation on files with too many basic blocks. 
@@ -488,8 +494,8 @@ config("ubsan_no_recover") { config("ubsan_security_flags") { if (is_ubsan_security) { - if (!defined(ubsan_security_blacklist_path)) { - ubsan_security_blacklist_path = + if (!defined(ubsan_security_ignorelist_path)) { + ubsan_security_ignorelist_path = rebase_path("//tools/ubsan/security_ignorelist.txt", root_build_dir) } cflags = [ @@ -497,26 +503,20 @@ config("ubsan_security_flags") { "-fsanitize=shift", "-fsanitize=signed-integer-overflow", "-fsanitize=vla-bound", - "-fsanitize-blacklist=$ubsan_security_blacklist_path", + "-fsanitize-ignorelist=$ubsan_security_ignorelist_path", ] } } -config("ubsan_null_flags") { - if (is_ubsan_null) { - cflags = [ "-fsanitize=null" ] - } -} - config("ubsan_vptr_flags") { if (is_ubsan_vptr) { - if (!defined(ubsan_vptr_blacklist_path)) { - ubsan_vptr_blacklist_path = + if (!defined(ubsan_vptr_ignorelist_path)) { + ubsan_vptr_ignorelist_path = rebase_path("//tools/ubsan/vptr_ignorelist.txt", root_build_dir) } cflags = [ "-fsanitize=vptr", - "-fsanitize-blacklist=$ubsan_vptr_blacklist_path", + "-fsanitize-ignorelist=$ubsan_vptr_ignorelist_path", ] } } @@ -539,7 +539,6 @@ all_sanitizer_configs = [ ":tsan_flags", ":ubsan_flags", ":ubsan_no_recover", - ":ubsan_null_flags", ":ubsan_security_flags", ":ubsan_vptr_flags", ":fuzzing_build_mode", diff --git a/build/config/sanitizers/OWNERS b/build/config/sanitizers/OWNERS index f6a122b518c1..331e8bcf5767 100644 --- a/build/config/sanitizers/OWNERS +++ b/build/config/sanitizers/OWNERS @@ -1,3 +1 @@ -inferno@chromium.org metzman@chromium.org -ochang@chromium.org diff --git a/build/config/sanitizers/sanitizers.gni b/build/config/sanitizers/sanitizers.gni index 576bf83c6301..446736e790cc 100644 --- a/build/config/sanitizers/sanitizers.gni +++ b/build/config/sanitizers/sanitizers.gni @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -34,9 +34,6 @@ declare_args() { # Halt the program if a problem is detected. is_ubsan_no_recover = false - # Compile for Undefined Behaviour Sanitizer's null pointer checks. - is_ubsan_null = false - # Track where uninitialized memory originates from. From fastest to slowest: # 0 - no tracking, 1 - track only the initial allocation site, 2 - track the # chain of stores leading from allocation site to use site. @@ -50,23 +47,15 @@ declare_args() { # See http://clang.llvm.org/docs/ControlFlowIntegrity.html # # TODO(pcc): Remove this flag if/when CFI is enabled in all official builds. - # - # Disable this on linux-chromeos to avoid using ThinLTO there; - # crbug.com/1033839. Similarly, don't use this on ARC builds. - # - # TODO(crbug.com/1159424): Reassess the validity of the next expression. - is_cfi = - is_official_build && - (((target_os == "linux" || is_chromeos_lacros) && target_cpu == "x64") || - ((is_chromeos_ash || is_chromeos_lacros) && is_chromeos_device)) + is_cfi = is_official_build && is_clang && + ((target_os == "linux" && target_cpu == "x64") || + (is_chromeos && is_chromeos_device)) # Enable checks for indirect function calls via a function pointer. # TODO(pcc): remove this when we're ready to add these checks by default. # https://crbug.com/701919 - # - # TODO(crbug.com/1159424): Reassess the validity of the next expression. 
- use_cfi_icall = (target_os == "linux" || is_chromeos_lacros) && - target_cpu == "x64" && is_official_build + use_cfi_icall = + target_os == "linux" && target_cpu == "x64" && is_official_build # Print detailed diagnostics when Control Flow Integrity detects a violation. use_cfi_diag = false @@ -79,6 +68,10 @@ declare_args() { # See http://www.chromium.org/developers/testing/libfuzzer use_libfuzzer = false + # Compile for fuzzing with centipede. + # See https://github.com/google/centipede + use_centipede = false + # Compile for fuzzing with AFL. use_afl = false @@ -120,14 +113,17 @@ declare_args() { # Enable checks for bad casts: derived cast and unrelated cast. # TODO(krasin): remove this, when we're ready to add these checks by default. # https://crbug.com/626794 - use_cfi_cast = is_cfi && (is_chromeos_ash || is_chromeos_lacros) + use_cfi_cast = is_cfi && is_chromeos # Compile for Undefined Behaviour Sanitizer's vptr checks. is_ubsan_vptr = is_ubsan_security } +assert(!is_hwasan || (target_os == "android" && target_cpu == "arm64"), + "HWASan only supported on Android ARM64 builds.") + # Disable sanitizers for non-target toolchains. -if (!is_a_target_toolchain) { +if (!is_a_target_toolchain || toolchain_disables_sanitizers) { is_asan = false is_cfi = false is_hwasan = false @@ -135,18 +131,20 @@ if (!is_a_target_toolchain) { is_msan = false is_tsan = false is_ubsan = false - is_ubsan_null = false is_ubsan_no_recover = false is_ubsan_security = false is_ubsan_vptr = false msan_track_origins = 0 sanitizer_coverage_flags = "" use_afl = false + use_centipede = false use_cfi_diag = false use_cfi_recover = false use_libfuzzer = false use_locally_built_instrumented_libraries = false use_sanitizer_coverage = false +} else if (current_cpu != "arm64") { + is_hwasan = false } # Use dynamic libraries instrumented by one of the sanitizers instead of the @@ -157,8 +155,18 @@ use_prebuilt_instrumented_libraries = is_msan # Whether we are doing a fuzzer build. Normally this should be checked instead # of checking "use_libfuzzer || use_afl" because often developers forget to -# check for "use_afl". -use_fuzzing_engine = use_libfuzzer || use_afl || use_external_fuzzing_engine +# check for "use_afl", and "use_centipede" is new. +use_fuzzing_engine = + use_libfuzzer || use_afl || use_centipede || use_external_fuzzing_engine + +# Whether the current fuzzing engine supports libprotobuf_mutator. Right now +# this is just libfuzzer, but others are likely to support this in future, +# so it's preferable to check this. +use_fuzzing_engine_with_lpm = use_libfuzzer || use_centipede + +# Whether the fuzzing engine supports fuzzers which supply their own +# "main" function. +fuzzing_engine_supports_custom_main = use_libfuzzer || use_centipede # Args that are in turn dependent on other args must be in a separate # declare_args block. User overrides are only applied at the end of a @@ -185,6 +193,16 @@ declare_args() { if (use_fuzzing_engine && sanitizer_coverage_flags == "") { sanitizer_coverage_flags = "trace-pc-guard" + if (use_centipede) { + # Centipede's minimal flags are listed in //third_party/centipede/src/clang-flags.txt. + # But, for users like Chromium using an up-to-date clang, we can also + # enable extra optional types of coverage which may make Centipede more + # effective. 
This list is not currently documented and has been derived + # from discussion with centipede creators (though one is warned about at + # https://github.com/google/centipede/blob/main/centipede_callbacks.cc#L68) + sanitizer_coverage_flags = sanitizer_coverage_flags + + ",pc-table,trace-cmp,control-flow,trace-loads" + } } else if (use_sanitizer_coverage && sanitizer_coverage_flags == "") { sanitizer_coverage_flags = "trace-pc-guard,indirect-calls" } @@ -192,9 +210,9 @@ if (use_fuzzing_engine && sanitizer_coverage_flags == "") { # Whether we are linking against a sanitizer runtime library. Among other # things, this changes the default symbol level and other settings in order to # prepare to create stack traces "live" using the sanitizer runtime. -using_sanitizer = is_asan || is_hwasan || is_lsan || is_tsan || is_msan || - is_ubsan || is_ubsan_null || is_ubsan_vptr || - is_ubsan_security || use_sanitizer_coverage || use_cfi_diag +using_sanitizer = + is_asan || is_hwasan || is_lsan || is_tsan || is_msan || is_ubsan || + is_ubsan_vptr || is_ubsan_security || use_sanitizer_coverage || use_cfi_diag assert(!using_sanitizer || is_clang, "Sanitizers (is_*san) require setting is_clang = true in 'gn args'") @@ -226,12 +244,9 @@ if (use_libfuzzer && (is_linux || is_chromeos)) { # unsupported or unadvisable configurations. # # For one-off testing, just comment this assertion out. -assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_null || is_ubsan_vptr), +assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_vptr), "Sanitizers should generally be used in release (set is_debug=false).") -assert(!is_hwasan || (is_android && current_cpu == "arm64"), - "HWASan only supported on Android ARM64 builds.") - assert(!is_msan || ((is_linux || is_chromeos) && current_cpu == "x64"), "MSan currently only works on 64-bit Linux and ChromeOS builds.") @@ -294,3 +309,22 @@ if (use_fuzzing_engine) { } } } + +# Options common to different fuzzer engines. +# Engine should be compiled without coverage (infinite loop in trace_cmp). +fuzzing_engine_remove_configs = [ + "//build/config/coverage:default_coverage", + "//build/config/sanitizers:default_sanitizer_flags", +] + +# Add any sanitizer flags back. In MSAN builds, instrumenting libfuzzer with +# MSAN is necessary since all parts of the binary need to be instrumented for it +# to work. ASAN builds are more subtle: libfuzzer depends on features from the +# C++ STL. If it were not instrumented, templates would be instantiated without +# ASAN from libfuzzer and with ASAN in other TUs. The linker might merge +# instrumented template instantiations with non-instrumented ones (which could +# have a different ABI) in the final binary, which is problematic for TUs +# expecting one particular ABI (https://crbug.com/915422). The other sanitizers +# are added back for the same reason. +fuzzing_engine_add_configs = + [ "//build/config/sanitizers:default_sanitizer_flags_but_coverage" ] diff --git a/build/config/siso/.gitignore b/build/config/siso/.gitignore new file mode 100644 index 000000000000..522449bd69b0 --- /dev/null +++ b/build/config/siso/.gitignore @@ -0,0 +1 @@ +/.sisoenv diff --git a/build/config/siso/OWNERS b/build/config/siso/OWNERS new file mode 100644 index 000000000000..03122b7b39f1 --- /dev/null +++ b/build/config/siso/OWNERS @@ -0,0 +1,6 @@ +# All current members of the Chrome Build Team.
+jwata@google.com +philwo@google.com +richardwa@google.com +tikuta@chromium.org +ukai@google.com diff --git a/build/config/siso/README.md b/build/config/siso/README.md new file mode 100644 index 000000000000..ff38eba47501 --- /dev/null +++ b/build/config/siso/README.md @@ -0,0 +1,8 @@ +# Build config for Siso + +This directory contains configurations for +[siso](https://chromium.googlesource.com/infra/infra/+/refs/heads/main/go/src/infra/build/siso/) +build tool. + +Please refer to [the config specifications](https://chromium.googlesource.com/infra/infra/+/refs/heads/main/go/src/infra/build/siso/docs/starlark_config.md) in the Siso repo. + diff --git a/build/config/siso/clang_linux.star b/build/config/siso/clang_linux.star new file mode 100644 index 000000000000..b88af3b93552 --- /dev/null +++ b/build/config/siso/clang_linux.star @@ -0,0 +1,109 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Siso configuration for clang/linux.""" + +load("@builtin//path.star", "path") +load("@builtin//struct.star", "module") + +__filegroups = {} + +def __clang_compile_coverage(ctx, cmd): + # TODO(b/278225415): add better support for coverage build. + # The instrument file contains the list of files affected by a patch. + # Including this file to remote action input prevents cache hits. + inputs = [] + deps_args = [] + for i, arg in enumerate(cmd.args): + if i == 0: + continue + if arg == "../../build/toolchain/clang_code_coverage_wrapper.py": + continue + if arg.startswith("--files-to-instrument="): + inputs.append(ctx.fs.canonpath(arg.removeprefix("--files-to-instrument="))) + continue + if len(deps_args) == 0 and path.base(arg).find("clang") >= 0: + deps_args.append(arg) + continue + if deps_args: + if arg in ["-MD", "-MMD", "-c"]: + continue + if arg.startswith("-MF") or arg.startswith("-o"): + continue + if i > 1 and cmd.args[i - 1] in ["-MF", "-o"]: + continue + deps_args.append(arg) + if deps_args: + deps_args.append("-M") + ctx.actions.fix( + tool_inputs = cmd.tool_inputs + inputs, + deps_args = deps_args, + ) + +__handlers = { + "clang_compile_coverage": __clang_compile_coverage, +} + +def __step_config(ctx, step_config): + step_config["input_deps"].update({ + # clang++ is a symlink to clang + # but siso doesn't add symlink target automatically. 
+ "third_party/llvm-build/Release+Asserts/bin/clang++": [ + "third_party/llvm-build/Release+Asserts/bin/clang", + ], + }) + step_config["rules"].extend([ + { + "name": "clang/cxx", + "action": "(.*_)?cxx", + "command_prefix": "../../third_party/llvm-build/Release+Asserts/bin/clang++ ", + "inputs": [ + "third_party/llvm-build/Release+Asserts/bin/clang++", + ], + "remote": True, + "canonicalize_dir": True, + }, + { + "name": "clang/cc", + "action": "(.*_)?cc", + "command_prefix": "../../third_party/llvm-build/Release+Asserts/bin/clang ", + "inputs": [ + "third_party/llvm-build/Release+Asserts/bin/clang", + ], + "remote": True, + "canonicalize_dir": True, + }, + { + "name": "clang-coverage/cxx", + "action": "(.*_)?cxx", + "command_prefix": "\"python3\" ../../build/toolchain/clang_code_coverage_wrapper.py", + "inputs": [ + "build/toolchain/clang_code_coverage_wrapper.py", + "third_party/llvm-build/Release+Asserts/bin/clang++", + ], + "handler": "clang_compile_coverage", + "remote": True, + "canonicalize_dir": True, + }, + { + "name": "clang-coverage/cc", + "action": "(.*_)?cc", + "command_prefix": "\"python3\" ../../build/toolchain/clang_code_coverage_wrapper.py", + "inputs": [ + "build/toolchain/clang_code_coverage_wrapper.py", + "third_party/llvm-build/Release+Asserts/bin/clang", + ], + "handler": "clang_compile_coverage", + "remote": True, + "canonicalize_dir": True, + }, + ]) + return step_config + +clang = module( + "clang", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/build/config/siso/configure_siso.py b/build/config/siso/configure_siso.py new file mode 100755 index 000000000000..2770f6e72b95 --- /dev/null +++ b/build/config/siso/configure_siso.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""This script is used to configure siso.""" + +import argparse +import os +import sys + +THIS_DIR = os.path.abspath(os.path.dirname(__file__)) + + +def main(): + parser = argparse.ArgumentParser(description='configure siso') + parser.add_argument('--rbe_instance', help='RBE instance to use for Siso') + args = parser.parse_args() + + project = None + if not args.rbe_instance: + return 0 + rbe_instance = args.rbe_instance + elems = rbe_instance.split('/') + if len(elems) == 4 and elems[0] == 'projects': + project = elems[1] + rbe_instance = elems[-1] + siso_env_path = os.path.join(THIS_DIR, '.sisoenv') + with open(siso_env_path, 'w') as f: + if project: + f.write('SISO_PROJECT=%s\n' % project) + f.write('SISO_REAPI_INSTANCE=%s\n' % rbe_instance) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/config/siso/linux.star b/build/config/siso/linux.star new file mode 100644 index 000000000000..d02318e7c708 --- /dev/null +++ b/build/config/siso/linux.star @@ -0,0 +1,43 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Siso configuration for linux.""" + +load("@builtin//struct.star", "module") +load("./clang_linux.star", "clang") +load("./mojo.star", "mojo") +load("./nacl_linux.star", "nacl") +load("./remote_exec_wrapper.star", "remote_exec_wrapper") + +__filegroups = {} +__filegroups.update(clang.filegroups) +__filegroups.update(mojo.filegroups) +__filegroups.update(nacl.filegroups) + +__handlers = {} +__handlers.update(clang.handlers) +__handlers.update(mojo.handlers) +__handlers.update(nacl.handlers) + +def __step_config(ctx, step_config): + step_config["platforms"] = { + "default": { + "OSFamily": "Linux", + "container-image": "docker://gcr.io/chops-private-images-prod/rbe/siso-chromium/linux@sha256:d4fcda628ebcdb3dd79b166619c56da08d5d7bd43d1a7b1f69734904cc7a1bb2", + }, + } + if remote_exec_wrapper.enabled(ctx): + step_config = remote_exec_wrapper.step_config(ctx, step_config) + else: + step_config = clang.step_config(ctx, step_config) + step_config = mojo.step_config(ctx, step_config) + step_config = nacl.step_config(ctx, step_config) + return step_config + +chromium = module( + "chromium", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/build/config/siso/mac.star b/build/config/siso/mac.star new file mode 100644 index 000000000000..7a638b9d33a3 --- /dev/null +++ b/build/config/siso/mac.star @@ -0,0 +1,23 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Siso configuration for macOS.""" + +load("@builtin//struct.star", "module") +load("./remote_exec_wrapper.star", "remote_exec_wrapper") + +__filegroups = {} +__handlers = {} + +def __step_config(ctx, step_config): + if remote_exec_wrapper.enabled(ctx): + step_config = remote_exec_wrapper.step_config(ctx, step_config) + return step_config + +chromium = module( + "chromium", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/build/config/siso/main.star b/build/config/siso/main.star new file mode 100644 index 000000000000..67121dca9498 --- /dev/null +++ b/build/config/siso/main.star @@ -0,0 +1,47 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Siso configuration main entry.""" + +load("@builtin//encoding.star", "json") +load("@builtin//runtime.star", "runtime") +load("@builtin//struct.star", "module") +load("./linux.star", chromium_linux = "chromium") +load("./mac.star", chromium_mac = "chromium") +load("./simple.star", "simple") +load("./windows.star", chromium_windows = "chromium") + +def init(ctx): + print("runtime: os:%s arch:%s run:%d" % ( + runtime.os, + runtime.arch, + runtime.num_cpu, + )) + host = { + "linux": chromium_linux, + "darwin": chromium_mac, + "windows": chromium_windows, + }[runtime.os] + step_config = { + "platforms": {}, + "input_deps": {}, + "rules": [], + } + step_config = host.step_config(ctx, step_config) + step_config = simple.step_config(ctx, step_config) + + filegroups = {} + filegroups.update(host.filegroups) + filegroups.update(simple.filegroups) + + handlers = {} + handlers.update(host.handlers) + handlers.update(simple.handlers) + + return module( + "config", + step_config = json.encode(step_config), + filegroups = filegroups, + handlers = handlers, + ) diff --git a/build/config/siso/mojo.star b/build/config/siso/mojo.star new file mode 100644 index 000000000000..45502e745dcd --- /dev/null +++ b/build/config/siso/mojo.star @@ -0,0 +1,129 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Siso configuration for mojo.""" + +load("@builtin//struct.star", "module") + +__filegroups = {} + +__handlers = {} + +def __step_config(ctx, step_config): + step_config["rules"].extend([ + { + "name": "mojo/mojom_bindigns_generator", + "command_prefix": "python3 ../../mojo/public/tools/bindings/mojom_bindings_generator.py", + "inputs": [ + "mojo/public/tools/bindings/mojom_bindings_generator.py", + ], + "indirect_inputs": { + "includes": [ + "*.js", + "*.mojom", + "*.mojom-module", + "*.test-mojom", + "*.test-mojom-module", + "*.zip", + ], + }, + "exclude_input_patterns": [ + "*.stamp", + ], + # TODO(crbug.com/1437820): unspecified outputs of mojom_bindings_generator.py + "outputs_map": { + "./gen/components/aggregation_service/aggregation_service.mojom.js": { + "outputs": [ + "./gen/mojom-webui/components/aggregation_service/aggregation_service.mojom-webui.js", + ], + }, + "./gen/components/attribution_reporting/eligibility_error.mojom.js": { + "outputs": [ + "./gen/mojom-webui/components/attribution_reporting/eligibility_error.mojom-webui.js", + "./gen/mojom-webui/components/attribution_reporting/registration_type.mojom-webui.js", + "./gen/mojom-webui/components/attribution_reporting/source_registration_error.mojom-webui.js", + "./gen/mojom-webui/components/attribution_reporting/trigger_registration_error.mojom-webui.js", + ], + }, + "./gen/components/attribution_reporting/registration.mojom.js": { + "outputs": [ + "./gen/mojom-webui/components/attribution_reporting/registration.mojom-webui.js", + ], + }, + "./gen/media/capture/mojom/image_capture.mojom.js": { + "outputs": [ + "./gen/mojom-webui/media/capture/mojom/image_capture.mojom-webui.js", + ], + }, + "./gen/services/device/public/mojom/usb_device.mojom.js": { + "outputs": [ + "./gen/mojom-webui/services/device/public/mojom/usb_device.mojom-webui.js", + "./gen/mojom-webui/services/device/public/mojom/usb_enumeration_options.mojom-webui.js", + "./gen/mojom-webui/services/device/public/mojom/usb_manager.mojom-webui.js", + "./gen/mojom-webui/services/device/public/mojom/usb_manager_client.mojom-webui.js", + ], 
+ }, + "./gen/services/media_session/public/mojom/audio_focus.mojom.js": { + "outputs": [ + "./gen/mojom-webui/services/media_session/public/mojom/audio_focus.mojom-webui.js", + "./gen/mojom-webui/services/media_session/public/mojom/constants.mojom-webui.js", + "./gen/mojom-webui/services/media_session/public/mojom/media_controller.mojom-webui.js", + "./gen/mojom-webui/services/media_session/public/mojom/media_session.mojom-webui.js", + ], + }, + "./gen/services/network/public/mojom/attribution.mojom.js": { + "outputs": [ + "./gen/mojom-webui/services/network/public/mojom/attribution.mojom-webui.js", + ], + }, + "./gen/services/network/public/mojom/schemeful_site.mojom.js": { + "outputs": [ + "./gen/mojom-webui/services/network/public/mojom/schemeful_site.mojom-webui.js", + ], + }, + "./gen/third_party/blink/public/mojom/quota/quota_manager_host.mojom.js": { + "outputs": [ + "./gen/mojom-webui/third_party/blink/public/mojom/quota/quota_manager_host.mojom-webui.js", + "./gen/mojom-webui/third_party/blink/public/mojom/quota/quota_types.mojom-webui.js", + ], + }, + "./gen/third_party/blink/public/mojom/storage_key/ancestor_chain_bit.mojom.js": { + "outputs": [ + "./gen/mojom-webui/third_party/blink/public/mojom/storage_key/ancestor_chain_bit.mojom-webui.js", + "./gen/mojom-webui/third_party/blink/public/mojom/storage_key/storage_key.mojom-webui.js", + ], + }, + "./gen/ui/base/mojom/ui_base_types.mojom.js": { + "outputs": [ + "./gen/mojom-webui/ui/base/mojom/ui_base_types.mojom-webui.js", + "./gen/mojom-webui/ui/base/mojom/window_open_disposition.mojom-webui.js", + ], + }, + "./gen/ui/gfx/image/mojom/image.mojom.js": { + "outputs": [ + "./gen/mojom-webui/ui/gfx/image/mojom/image.mojom-webui.js", + ], + }, + }, + "restat": True, + "remote": True, + "output_local": True, + "platform": { + # mojo_bindings_generators.py will run faster on n2-highmem-8 + # than n2-custom-2-3840 + # e.g. + # n2-highmem-8: exec: 880.202978ms + # n2-custom-2-3840: exec: 2.42808488s + "gceMachineType": "n2-highmem-8", + }, + }, + ]) + return step_config + +mojo = module( + "mojo", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/build/config/siso/nacl_linux.star b/build/config/siso/nacl_linux.star new file mode 100644 index 000000000000..b6026ac2005c --- /dev/null +++ b/build/config/siso/nacl_linux.star @@ -0,0 +1,179 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Siso configuration for nacl/linux.""" + +load("@builtin//struct.star", "module") + +__filegroups = { + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pydir:pydir": { + "type": "glob", + "includes": ["*.py"], + }, + "native_client/toolchain/linux_x86/pnacl_newlib/lib:libllvm": { + "type": "glob", + "includes": ["libLLVM*.so"], + }, + "native_client/toolchain/linux_x86/saigo_newlib/bin:clang": { + "type": "glob", + "includes": ["clang-*"], + }, + "native_client/toolchain/linux_x86/saigo_newlib/lib:libso": { + "type": "glob", + "includes": ["*.so*"], + }, + "native_client/toolchain/linux_x86/nacl_x86_glibc/lib/gcc/x86_64-nacl:crtbegin": { + "type": "glob", + "includes": ["crtbegin.o"], + }, + "native_client/toolchain/linux_x86/nacl_x86_glibc/libexec/gcc/x86_64-nacl:ccbackend": { + "type": "glob", + "includes": ["cc1", "cc1plus", "collect2"], + }, +} + +__handlers = {} + +def __step_config(ctx, step_config): + step_config["rules"].extend([ + { + "name": "nacl_linux/pnacl-clang++", + "action": "newlib_pnacl.*_cxx", + "command_prefix": "../../native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang++", + "inputs": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang++", + ], + "remote": True, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/pnacl-clang", + "action": "newlib_pnacl.*_cc", + "command_prefix": "../../native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang", + "inputs": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang", + ], + "remote": True, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/glibc/x86_64-nacl-gcc", + "action": "glibc_x64_cc", + "inputs": [ + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-gcc", + ], + # ELF-32 doesn't work on gVisor, + # so will local-fallback if gVisor is used. + # TODO(b/278485912): remote=True for trusted instance. + "remote": False, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/glibc/x86_64-nacl-g++", + "action": "glibc_x64_cxx", + "inputs": [ + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-g++", + ], + # ELF-32 doesn't work on gVisor, + # so will local-fallback if gVisor is used. + # TODO(b/278485912): remote=True for trusted instance. 
+ "remote": False, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/pnacl_newlib/x86_64-nacl-clang++", + "action": "clang_newlib_x64_cxx", + "inputs": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/x86_64-nacl-clang++", + "native_client/toolchain/linux_x86/pnacl_newlib/x86_64-nacl/bin/ld", + ], + "remote": True, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/pnacl_newlib/x86_64-nacl-clang", + "action": "clang_newlib_x64_cc", + "inputs": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/x86_64-nacl-clang", + "native_client/toolchain/linux_x86/pnacl_newlib/x86_64-nacl/bin/ld", + ], + "remote": True, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/saigo_newlib/x86_64-nacl-clang++", + "action": "irt_x64_cxx", + "command_prefix": "../../native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang++", + "inputs": [ + "native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang++", + ], + "remote": True, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/saigo_newlib/x86_64-nacl-clang", + "action": "irt_x64_cc", + "command_prefix": "../../native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang", + "inputs": [ + "native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang", + ], + "remote": True, + "input_root_absolute_path": True, + }, + ]) + + step_config["input_deps"].update({ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/clang", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/driver.conf", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-llc", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pydir:pydir", + "native_client/toolchain/linux_x86/pnacl_newlib/lib:libllvm", + "native_client/toolchain/linux_x86/pnacl_newlib/x86_64-nacl/bin/ld", + ], + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang++": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/clang", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/driver.conf", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-llc", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pydir:pydir", + "native_client/toolchain/linux_x86/pnacl_newlib/lib:libllvm", + "native_client/toolchain/linux_x86/pnacl_newlib/x86_64-nacl/bin/ld", + ], + "native_client/toolchain/linux_x86/pnacl_newlib/bin/x86_64-nacl-clang": [ + "native_client/toolchain/linux_x86/pnacl_newlib/lib:libllvm", + ], + "native_client/toolchain/linux_x86/pnacl_newlib/bin/x86_64-nacl-clang++": [ + "native_client/toolchain/linux_x86/pnacl_newlib/lib:libllvm", + ], + "native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang": [ + "native_client/toolchain/linux_x86/saigo_newlib/bin:clang", + "native_client/toolchain/linux_x86/saigo_newlib/lib:libso", + "native_client/toolchain/linux_x86/saigo_newlib/x86_64-nacl/bin/ld", + ], + "native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang++": [ + "native_client/toolchain/linux_x86/saigo_newlib/bin:clang", + "native_client/toolchain/linux_x86/saigo_newlib/lib:libso", + "native_client/toolchain/linux_x86/saigo_newlib/x86_64-nacl/bin/ld", + ], + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-gcc": [ + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-as", + "native_client/toolchain/linux_x86/nacl_x86_glibc/lib/gcc/x86_64-nacl:crtbegin", + "native_client/toolchain/linux_x86/nacl_x86_glibc/libexec/gcc/x86_64-nacl:ccbackend", + 
"native_client/toolchain/linux_x86/nacl_x86_glibc/x86_64-nacl/bin/as", + ], + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-g++": [ + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-as", + "native_client/toolchain/linux_x86/nacl_x86_glibc/lib/gcc/x86_64-nacl:crtbegin", + "native_client/toolchain/linux_x86/nacl_x86_glibc/libexec/gcc/x86_64-nacl:ccbackend", + "native_client/toolchain/linux_x86/nacl_x86_glibc/x86_64-nacl/bin/as", + ], + }) + return step_config + +nacl = module( + "nacl", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/build/config/siso/remote_exec_wrapper.star b/build/config/siso/remote_exec_wrapper.star new file mode 100644 index 000000000000..dcd516920e00 --- /dev/null +++ b/build/config/siso/remote_exec_wrapper.star @@ -0,0 +1,58 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Siso configuration for remote exec wrapper.""" + +load("@builtin//lib/gn.star", "gn") +load("@builtin//struct.star", "module") + +__filegroups = {} +__handlers = {} + +def __enabled(ctx): + if "args.gn" in ctx.metadata: + gn_args = gn.parse_args(ctx.metadata["args.gn"]) + if gn_args.get("use_goma") == "true": + return True + if gn_args.get("use_remoteexec") == "true": + return True + return False + +def __step_config(ctx, step_config): + step_config["rules"].extend([ + { + "name": "clang/cxx", + "action": "(.*_)?cxx", + "use_remote_exec_wrapper": True, + }, + { + "name": "clang/cc", + "action": "(.*_)?cc", + "use_remote_exec_wrapper": True, + }, + { + "name": "clang/objcxx", + "action": "(.*_)?objcxx", + "use_remote_exec_wrapper": True, + }, + { + "name": "clang/objc", + "action": "(.*_)?objc", + "use_remote_exec_wrapper": True, + }, + { + "name": "action_remote", + "command_prefix": "python3 ../../build/util/action_remote.py", + "use_remote_exec_wrapper": True, + }, + ]) + return step_config + +remote_exec_wrapper = module( + "remote_exec_wrapper", + enabled = __enabled, + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/build/config/siso/simple.star b/build/config/siso/simple.star new file mode 100644 index 000000000000..71b18d0a797d --- /dev/null +++ b/build/config/siso/simple.star @@ -0,0 +1,46 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Siso configuration for simple steps.""" + +load("@builtin//struct.star", "module") + +def __copy(ctx, cmd): + input = cmd.inputs[0] + out = cmd.outputs[0] + ctx.actions.copy(input, out, recursive = ctx.fs.is_dir(input)) + ctx.actions.exit(exit_status = 0) + +def __stamp(ctx, cmd): + out = cmd.outputs[0] + ctx.actions.write(out) + ctx.actions.exit(exit_status = 0) + +__handlers = { + "copy": __copy, + "stamp": __stamp, +} + +def __step_config(ctx, step_config): + step_config["rules"].extend([ + { + "name": "simple/copy", + "action": "(.*_)?copy", + "handler": "copy", + }, + { + "name": "simple/stamp", + "action": "(.*_)?stamp", + "handler": "stamp", + "replace": True, + }, + ]) + return step_config + +simple = module( + "simple", + step_config = __step_config, + filegroups = {}, + handlers = __handlers, +) diff --git a/build/config/siso/windows.star b/build/config/siso/windows.star new file mode 100644 index 000000000000..88636f7b8ca6 --- /dev/null +++ b/build/config/siso/windows.star @@ -0,0 +1,23 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Siso configuration for Windows.""" + +load("@builtin//struct.star", "module") +load("./remote_exec_wrapper.star", "remote_exec_wrapper") + +__filegroups = {} +__handlers = {} + +def __step_config(ctx, step_config): + if remote_exec_wrapper.enabled(ctx): + step_config = remote_exec_wrapper.step_config(ctx, step_config) + return step_config + +chromium = module( + "chromium", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/build/config/sysroot.gni b/build/config/sysroot.gni index 765623b09171..b81eb0055745 100644 --- a/build/config/sysroot.gni +++ b/build/config/sysroot.gni @@ -1,12 +1,10 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # This header file defines the "sysroot" variable which is the absolute path # of the sysroot. If no sysroot applies, the variable will be an empty string. -import("//build/config/chrome_build.gni") - declare_args() { # The path of the sysroot that is applied when compiling using the target # toolchain. @@ -43,17 +41,17 @@ if (sysroot == "") { # By default build against a sysroot image downloaded from Cloud Storage # during gclient runhooks. 
if (current_cpu == "x64") { - sysroot = "$target_sysroot_dir/debian_sid_amd64-sysroot" + sysroot = "$target_sysroot_dir/debian_bullseye_amd64-sysroot" } else if (current_cpu == "x86") { - sysroot = "$target_sysroot_dir/debian_sid_i386-sysroot" + sysroot = "$target_sysroot_dir/debian_bullseye_i386-sysroot" } else if (current_cpu == "mipsel") { - sysroot = "$target_sysroot_dir/debian_sid_mips-sysroot" + sysroot = "$target_sysroot_dir/debian_bullseye_mips-sysroot" } else if (current_cpu == "mips64el") { - sysroot = "$target_sysroot_dir/debian_sid_mips64el-sysroot" + sysroot = "$target_sysroot_dir/debian_bullseye_mips64el-sysroot" } else if (current_cpu == "arm") { - sysroot = "$target_sysroot_dir/debian_sid_arm-sysroot" + sysroot = "$target_sysroot_dir/debian_bullseye_arm-sysroot" } else if (current_cpu == "arm64") { - sysroot = "$target_sysroot_dir/debian_sid_arm64-sysroot" + sysroot = "$target_sysroot_dir/debian_bullseye_arm64-sysroot" } else { assert(false, "No linux sysroot for cpu: $target_cpu") } diff --git a/build/config/ui.gni b/build/config/ui.gni index b0a4f30e9835..f9e6bb9a6141 100644 --- a/build/config/ui.gni +++ b/build/config/ui.gni @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -20,14 +20,9 @@ import("//build/config/chromecast_build.gni") import("//build/config/chromeos/args.gni") import("//build/config/chromeos/ui_mode.gni") +import("//build/config/ozone.gni") declare_args() { - # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux - # that does not require X11. - use_ozone = - is_chromeos || (is_chromecast && !is_android) || is_fuchsia || - is_linux && !is_starboard - # Indicates if the UI toolkit depends on X11. # Enabled by default. Can be disabled if Ozone only build is required and # vice-versa. @@ -42,18 +37,16 @@ declare_args() { # True means the UI is built using the "views" framework. toolkit_views = is_mac || is_win || is_linux || is_chromeos || is_fuchsia - # TODO(crbug.com/1171629): Remove is_chromeos_lacros. - # Whether we should use glib, a low level C utility library. - use_glib = (is_linux || is_chromeos_lacros) && !is_chromecast && !is_starboard + use_glib = + is_linux && !is_castos && !is_starboard && + # Avoid the need for glib when Android is building things via secondary + # toolchains. + target_os != "android" } -# TODO(crbug.com/1171629): Remove is_chromeos_lacros. -# Make sure glib is not used if building for ChromeOS/Chromecast -assert(!use_glib || ((is_linux || is_chromeos_lacros) && !is_chromecast)) +assert(!use_glib || (is_linux && !is_castos)) -# TODO(crbug.com/1171629): Remove is_chromeos_lacros. -# Whether to use atk, the Accessibility ToolKit library -use_atk = (is_linux || is_chromeos_lacros) && !is_chromecast && use_glib +use_atk = use_glib && current_toolchain == default_toolchain # Whether using Xvfb to provide a display server for a test might be # necessary. diff --git a/build/config/v8_target_cpu.gni b/build/config/v8_target_cpu.gni index 305981f3fa53..6c41226a657e 100644 --- a/build/config/v8_target_cpu.gni +++ b/build/config/v8_target_cpu.gni @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -44,6 +44,9 @@ if (v8_target_cpu == "") { } else if (current_toolchain == "//build/toolchain/linux:clang_x86_v8_mipsel") { v8_target_cpu = "mipsel" + } else if (current_toolchain == + "//build/toolchain/linux:clang_x64_v8_riscv64") { + v8_target_cpu = "riscv64" } else if (is_msan) { # If we're running under a sanitizer, if we configure v8 to generate # code that will be run under a simulator, then the generated code diff --git a/build/config/win/BUILD.gn b/build/config/win/BUILD.gn index 3b89f434713a..a0a28102a789 100644 --- a/build/config/win/BUILD.gn +++ b/build/config/win/BUILD.gn @@ -1,7 +1,8 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +import("//build/config/c++/c++.gni") import("//build/config/chrome_build.gni") import("//build/config/clang/clang.gni") import("//build/config/compiler/compiler.gni") @@ -12,6 +13,7 @@ if (!is_starboard) { import("//build/timestamp.gni") } import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") import("//build/toolchain/toolchain.gni") assert(is_win) @@ -67,7 +69,14 @@ config("compiler") { ] if (is_clang) { - cflags += [ "/Zc:twoPhase" ] + cflags += [ + "/Zc:twoPhase", + + # Consistently use backslash as the path separator when expanding the + # __FILE__ macro when targeting Windows regardless of the build + # environment. + "-ffile-reproducible", + ] } # Force C/C++ mode for the given GN detected file type. This is necessary @@ -98,9 +107,17 @@ config("compiler") { } if (current_cpu == "x86") { - cflags += [ "-m32" ] + if (host_cpu == "x86" || host_cpu == "x64") { + cflags += [ "-m32" ] + } else { + cflags += [ "--target=i386-windows" ] + } } else if (current_cpu == "x64") { - cflags += [ "-m64" ] + if (host_cpu == "x86" || host_cpu == "x64") { + cflags += [ "-m64" ] + } else { + cflags += [ "--target=x86_64-windows" ] + } } else if (current_cpu == "arm64") { cflags += [ "--target=arm64-windows" ] } else { @@ -114,13 +131,13 @@ config("compiler") { cflags += [ "-msse3" ] } - if (exec_script("//build/win/use_ansi_codes.py", [], "trim string") == + # Enable ANSI escape codes if something emulating them is around (cmd.exe + # doesn't understand ANSI escape codes by default). Make sure to not enable + # this if goma/remoteexec is in use, because this will lower cache hits. + if (!use_goma && !use_remoteexec && + exec_script("//build/win/use_ansi_codes.py", [], "trim string") == "True") { - cflags += [ - # cmd.exe doesn't understand ANSI escape codes by default, - # so only enable them if something emulating them is around. - "-fansi-escape-codes", - ] + cflags += [ "-fansi-escape-codes" ] } if (use_clang_diagnostics_format) { @@ -128,7 +145,8 @@ config("compiler") { } } - if (use_lld && !use_thin_lto && (is_clang || !use_goma)) { + # Disabled with cc_wrapper because of https://github.com/mozilla/sccache/issues/264 + if (use_lld && !use_thin_lto && (is_clang || !use_goma) && cc_wrapper == "") { # /Brepro lets the compiler not write the mtime field in the .obj output. # link.exe /incremental relies on this field to work correctly, but lld # never looks at this timestamp, so it's safe to pass this flag with @@ -152,6 +170,15 @@ config("compiler") { ldflags += [ "/lldignoreenv" ] } + # Some binaries create PDBs larger than 4 GiB. Increasing the PDB page size + # to 8 KiB allows 8 GiB PDBs. 
The larger page size also allows larger block maps + # which is a PDB limit that was hit in https://crbug.com/1406510. The page size + # can easily be increased in the future to allow even larger PDBs or larger + # block maps. + # This flag requires lld-link.exe or link.exe from VS 2022 or later to create + # the PDBs, and tools from circa 22H2 or later to consume the PDBs. + ldflags += [ "/pdbpagesize:8192" ] + if (!is_debug && !is_component_build) { # Enable standard linker optimizations like GC (/OPT:REF) and ICF in static # release builds. @@ -258,16 +285,13 @@ config("runtime_library") { } } -# Chromium supports running on Windows 7, but if these constants are set to -# Windows 7, then newer APIs aren't made available by the Windows SDK. -# So we set this to Windows 10 and then are careful to check at runtime -# to only call newer APIs when they're available. +# Chromium only supports Windows 10+. # Some third-party libraries assume that these defines set what version of # Windows is available at runtime. Targets using these libraries need to # manually override this config for their compiles. config("winver") { defines = [ - "NTDDI_VERSION=NTDDI_WIN10_VB", + "NTDDI_VERSION=NTDDI_WIN10_NI", # We can't say `=_WIN32_WINNT_WIN10` here because some files do # `#if WINVER < 0x0600` without including windows.h before, @@ -279,48 +303,11 @@ # Linker flags for Windows SDK setup, this is applied only to EXEs and DLLs. config("sdk_link") { - assert(current_cpu == "x64" || current_cpu == "x86" || current_cpu == "arm" || - current_cpu == "arm64", - "Only supports x64, x86, arm and arm64 CPUs") - if (current_cpu == "x64") { - ldflags = [ "/MACHINE:X64" ] - } else if (current_cpu == "x86") { + if (current_cpu == "x86") { ldflags = [ - "/MACHINE:X86", "/SAFESEH", # Not compatible with x64 so use only for x86. "/largeaddressaware", ] - } else if (current_cpu == "arm") { - ldflags = [ "/MACHINE:ARM" ] - } else if (current_cpu == "arm64") { - ldflags = [ "/MACHINE:ARM64" ] - } - - if (!is_starboard) { - vcvars_toolchain_data = exec_script("../../toolchain/win/setup_toolchain.py", - [ - visual_studio_path, - windows_sdk_path, - visual_studio_runtime_dirs, - current_os, - current_cpu, - "none", - ], - "scope") - - vc_lib_path = vcvars_toolchain_data.vc_lib_path - if (defined(vcvars_toolchain_data.vc_lib_atlmfc_path)) { - vc_lib_atlmfc_path = vcvars_toolchain_data.vc_lib_atlmfc_path - } - vc_lib_um_path = vcvars_toolchain_data.vc_lib_um_path - - lib_dirs = [ - "$vc_lib_um_path", - "$vc_lib_path", - ] - if (defined(vc_lib_atlmfc_path)) { - lib_dirs += [ "$vc_lib_atlmfc_path" ] - } } } @@ -344,17 +331,26 @@ config("common_linker_setup") { } } +# Flags that should be applied to building .exe files but not .dll files. +config("exe_flags") { + rustflags = [ "-Ctarget-feature=+crt-static" ] +} + config("default_cfg_compiler") { # Emit table of address-taken functions for Control-Flow Guard (CFG). # This is needed to allow functions to be called by code that is built # with CFG enabled, such as system libraries. # The CFG guards are only emitted if |win_enable_cfg_guards| is enabled.
- if (is_clang) { - if (win_enable_cfg_guards) { + if (win_enable_cfg_guards) { + if (is_clang) { cflags = [ "/guard:cf" ] - } else { + } + rustflags = [ "-Ccontrol-flow-guard" ] + } else { + if (is_clang) { cflags = [ "/guard:cf,nochecks" ] } + rustflags = [ "-Ccontrol-flow-guard=nochecks" ] } } @@ -367,6 +363,7 @@ config("disable_guards_cfg_compiler") { if (is_clang) { cflags = [ "/guard:cf,nochecks" ] } + rustflags = [ "-Ccontrol-flow-guard=nochecks" ] } config("cfi_linker") { @@ -493,8 +490,17 @@ config("default_crt") { config("release_crt") { if (is_component_build) { cflags = [ "/MD" ] + + if (use_custom_libcxx) { + # On Windows, including libcpmt[d]/msvcprt[d] explicitly links the C++ + # standard library, which libc++ needs for exception_ptr internals. + ldflags = [ "/DEFAULTLIB:msvcprt.lib" ] + } } else { cflags = [ "/MT" ] + if (use_custom_libcxx) { + ldflags = [ "/DEFAULTLIB:libcpmt.lib" ] + } } } @@ -502,8 +508,14 @@ config("dynamic_crt") { if (is_debug) { # This pulls in the DLL debug CRT and defines _DEBUG cflags = [ "/MDd" ] + if (use_custom_libcxx) { + ldflags = [ "/DEFAULTLIB:msvcprtd.lib" ] + } } else { cflags = [ "/MD" ] + if (use_custom_libcxx) { + ldflags = [ "/DEFAULTLIB:msvcprt.lib" ] + } } } @@ -511,8 +523,14 @@ config("static_crt") { if (is_debug) { # This pulls in the static debug CRT and defines _DEBUG cflags = [ "/MTd" ] + if (use_custom_libcxx) { + ldflags = [ "/DEFAULTLIB:libcpmtd.lib" ] + } } else { cflags = [ "/MT" ] + if (use_custom_libcxx) { + ldflags = [ "/DEFAULTLIB:libcpmt.lib" ] + } } } diff --git a/build/config/win/console_app.gni b/build/config/win/console_app.gni index cac2ef5d7310..038801e9fa00 100644 --- a/build/config/win/console_app.gni +++ b/build/config/win/console_app.gni @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/win/control_flow_guard.gni b/build/config/win/control_flow_guard.gni index bf6a82af0c38..176947f7fa67 100644 --- a/build/config/win/control_flow_guard.gni +++ b/build/config/win/control_flow_guard.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,7 +7,7 @@ import("//build/config/sanitizers/sanitizers.gni") declare_args() { # Set this to true to enable generation of CFG indirect call dispatch # guards. - win_enable_cfg_guards = false + win_enable_cfg_guards = !is_debug && !is_asan } if (win_enable_cfg_guards) { diff --git a/build/config/win/manifest.gni b/build/config/win/manifest.gni index e2115083fe57..e1859eacded3 100644 --- a/build/config/win/manifest.gni +++ b/build/config/win/manifest.gni @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/config/win/visual_studio_version.gni b/build/config/win/visual_studio_version.gni index 81e421631849..5c2c27e8732c 100644 --- a/build/config/win/visual_studio_version.gni +++ b/build/config/win/visual_studio_version.gni @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -46,11 +46,10 @@ if (is_starboard) { declare_args() { # Path to Visual Studio. If empty, the default is used which is to use the # automatic toolchain in depot_tools. If set, you must also set the - # visual_studio_version and wdk_path. + # visual_studio_version, wdk_path and windows_sdk_version. visual_studio_path = "" # Version of Visual Studio pointed to by the visual_studio_path. - # Currently always "2015". visual_studio_version = "" # Directory of the Windows driver kit. If visual_studio_path is empty, this @@ -61,6 +60,9 @@ declare_args() { # This value is the default location, override if you have a different # installation location. windows_sdk_path = "C:\Program Files (x86)\Windows Kits\10" + + # Version of the Windows SDK pointed to by the windows_sdk_path. + windows_sdk_version = "" } } @@ -68,6 +70,7 @@ if (visual_studio_path == "") { toolchain_data = exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope") visual_studio_path = toolchain_data.vs_path + windows_sdk_version = toolchain_data.sdk_version windows_sdk_path = toolchain_data.sdk_path visual_studio_version = toolchain_data.vs_version wdk_path = toolchain_data.wdk_dir @@ -75,6 +78,8 @@ if (visual_studio_path == "") { } else if (!is_starboard) { assert(visual_studio_version != "", "You must set the visual_studio_version if you set the path") + assert(windows_sdk_version != "", + "You must set the windows_sdk_version if you set the path") assert(wdk_path != "", "You must set the wdk_path if you set the visual studio path") visual_studio_runtime_dirs = [] diff --git a/build/config/x64.gni b/build/config/x64.gni deleted file mode 100644 index 9e86979cbcdb..000000000000 --- a/build/config/x64.gni +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# These are primarily relevant in current_cpu == "x64" contexts, where -# X64 code is being compiled. -if (current_cpu == "x64") { - declare_args() { - # The micro architecture of x64 cpu. This will be a string like "haswell" or - # "skylake". An empty string means to use the default architecture which is - # "x86-64". - # CPU options for "x86-64" in GCC can be found at - # https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html - # CPU options for "x86-64" in llvm can be found at - # https://github.com/llvm/llvm-project/blob/master/llvm/include/llvm/Support/X86TargetParser.def - x64_arch = "" - } - - if ((is_posix && !is_apple) || is_fuchsia) { - if (x64_arch == "") { - x64_arch = "x86-64" - } - } -} diff --git a/build/config/zip.gni b/build/config/zip.gni index 68bc49444ccc..d623a0d0a9c7 100644 --- a/build/config/zip.gni +++ b/build/config/zip.gni @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -13,6 +13,8 @@ import("python.gni") # base_dir (optional) # If provided, the archive paths will be relative to this directory. # Applies only to |inputs|. +# zip_comment_values (optional) +# A list of key=value strings to store in a JSON-encoded archive comment. # # deps, public_deps, data, data_deps, testonly, visibility # Normal meaning. 
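The `zip_comment_values` parameter documented above is forwarded by the hunk below as `--comment-json` arguments, with the key=value pairs ending up JSON-encoded in the archive comment. As a rough, self-contained illustration of that round trip (a sketch using only the standard library; `write_zip_with_comment` and `read_zip_comment` are hypothetical helper names, not the actual build script):

```python
import json
import zipfile


def write_zip_with_comment(archive_path, input_files, comment_values):
  """comment_values: a list of 'key=value' strings, as in zip_comment_values."""
  pairs = dict(kv.split('=', 1) for kv in comment_values)
  with zipfile.ZipFile(archive_path, 'w') as outfile:
    for path in input_files:
      outfile.write(path)
    # The archive comment carries the JSON-encoded key/value map.
    outfile.comment = json.dumps(pairs).encode('utf-8')


def read_zip_comment(archive_path):
  with zipfile.ZipFile(archive_path) as infile:
    return json.loads(infile.comment.decode('utf-8'))


# write_zip_with_comment('out.zip', ['a.txt'], ['version=114.0.5735.331'])
# read_zip_comment('out.zip') -> {'version': '114.0.5735.331'}
```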
@@ -36,6 +38,15 @@ template("zip") { rebase_path(invoker.output, root_build_dir), ] + if (defined(invoker.zip_comment_values)) { + foreach(comment, invoker.zip_comment_values) { + args += [ + "--comment-json", + comment, + ] + } + } + _rebased_inputs = rebase_path(invoker.inputs, root_build_dir) args += [ "--input-files=$_rebased_inputs" ] if (defined(invoker.base_dir)) { diff --git a/build/config/zos/BUILD.gn b/build/config/zos/BUILD.gn new file mode 100644 index 000000000000..082ac1d389d7 --- /dev/null +++ b/build/config/zos/BUILD.gn @@ -0,0 +1,57 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/sanitizers/sanitizers.gni") +import("//build/toolchain/toolchain.gni") + +# This is included by reference in the //build/config/compiler config that +# is applied to all targets. It is here to separate out the logic. + +config("compiler") { + defines = [ + "_AE_BIMODAL=1", + "_ALL_SOURCE", + "_ENHANCED_ASCII_EXT=0xFFFFFFFF", + "_Export=extern", + "_LARGE_TIME_API", + "_OPEN_MSGQ_EXT", + "_OPEN_SYS_FILE_EXT=1", + "_OPEN_SYS_SOCK_IPV6 ", + "_UNIX03_SOURCE ", + "_UNIX03_THREADS", + "_UNIX03_WITHDRAWN", + "_XOPEN_SOURCE=600", + "_XOPEN_SOURCE_EXTENDED", + "__static_assert=static_assert", + "PATH_MAX=1024", + ] + + cflags = [ + "-q64", + "-qASCII", + "-Wc,DLL", + "-Wa,GOFF", + "-qENUM=INT", + "-qEXPORTALL", + "-qASM", + "-qmakedep", + "-qARCH=10", + "-qTUNE=10", + "-qasmlib=sys1.maclib:sys1.modgen", + "-qfloat=IEEE", + "-qlibansi", + "-qgonumber", + "-qlongname", + ] + + cflags_cc = [ + ] + + asmflags = [ + "-Wa,GOFF", + ] + + ldflags = [ + ] +} diff --git a/build/copy_test_data_ios.py b/build/copy_test_data_ios.py index cd23fb29bd5d..69b957a72e20 100755 --- a/build/copy_test_data_ios.py +++ b/build/copy_test_data_ios.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Copies test data files or directories into a given output directory.""" -from __future__ import print_function import optparse import os diff --git a/build/cp.py b/build/cp.py index 0f32536b624b..2bcf55cbc255 100755 --- a/build/cp.py +++ b/build/cp.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/del_ninja_deps_cache.py b/build/del_ninja_deps_cache.py new file mode 100755 index 000000000000..c2560de660a2 --- /dev/null +++ b/build/del_ninja_deps_cache.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Delete .ninja_deps if it references files inside a libc++ dir which has +since been reverted back to a file, and would cause Ninja fail on Windows. See +crbug.com/1337238""" + +import os +import sys + + +def main(): + os.chdir(os.path.join(os.path.dirname(__file__), '..')) + + # Paths that have switched between being a directory and regular file. 
+ bad_dirs = [ + 'buildtools/third_party/libc++/trunk/include/__string', + 'buildtools/third_party/libc++/trunk/include/__tuple', + ] + + for bad_dir in bad_dirs: + if os.path.isdir(bad_dir): + # If it's a dir, .ninja_deps referencing files in it is not a problem. + continue + + for out_dir in os.listdir('out'): + ninja_deps = os.path.join('out', out_dir, '.ninja_deps') + try: + if str.encode(bad_dir) + b'/' in open(ninja_deps, 'rb').read(): + print('Deleting', ninja_deps) + os.remove(ninja_deps) + except FileNotFoundError: + pass + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/detect_host_arch.py b/build/detect_host_arch.py index cad0f4bf2a4e..c9d47e9139fb 100755 --- a/build/detect_host_arch.py +++ b/build/detect_host_arch.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python -# Copyright 2014 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Outputs host CPU architecture in format recognized by gyp.""" -from __future__ import print_function import platform import re diff --git a/build/dir_exists.py b/build/dir_exists.py index 70d367ec2690..da9813f6093f 100755 --- a/build/dir_exists.py +++ b/build/dir_exists.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright (c) 2011 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2011 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Writes True if the argument is a directory.""" diff --git a/build/docs/debugging_slow_builds.md b/build/docs/debugging_slow_builds.md index 315690c32f08..9bba5530bca8 100644 --- a/build/docs/debugging_slow_builds.md +++ b/build/docs/debugging_slow_builds.md @@ -1,12 +1,38 @@ -# Debugging slow builds - -Some tips for debugging slow build times: -* Use [ninjatracing](https://github.com/nico/ninjatracing) and chrome:tracing to - view a timeline of the most recent build. - * Many bots output a build trace (look for a `"ninja_log"` link). -* Use `gn gen --tracelog trace.json` to create a similar trace for `gn gen`. -* Depot Tool's `autoninja` has logic for summarizing slow steps. Enable it via: - * `NINJA_SUMMARIZE_BUILD=1 autoninja -C out/Debug my_target` +# Debugging Slow Builds + +Did you know that Ninja writes a log to disk after each build? + +To see what kinds of files took the longest for your previous build: + +```sh +cd out/Default +# Lives in depot_tools: +post_build_ninja_summary.py +``` + +You can also set `NINJA_SUMMARIZE_BUILD=1` to have this command run +after each `autoninja` invocation (also runs ninja with `-d stats`). + +To generate a Chrome trace of your most recent build: + +```sh +git clone https://github.com/nico/ninjatracing +ninjatracing/ninjatracing out/Default/.ninja_log > trace.json +# Then open in https://ui.perfetto.dev/ +``` + +## Slow Bot Builds + +Our bots run `ninjatracing` and `post_build_ninja_summary.py` as well. + +Find the trace at: `postprocess_for_goma > upload_log > ninja_log`: + + * _".ninja_log in table format (full)"_ is for `post_build_ninja_summary.py`. + * _"trace viewer (sort_by_end)"_ is for `ninjatracing`. + +## Advanced(ish) Tips + +* Use `gn gen --tracelog trace.json` to create a trace for `gn gen`. * Many Android templates make use of [`md5_check.py`](https://cs.chromium.org/chromium/src/build/android/gyp/util/md5_check.py) to optimize incremental builds. 
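To make the `.ninja_log` workflow above concrete, here is a rough sketch of the kind of summary `post_build_ninja_summary.py` produces. This is not the depot_tools script; it assumes the tab-separated `# ninja log v5` format (start ms, end ms, restat mtime, output path, command hash):

```python
#!/usr/bin/env python3
# Sketch only: lists the slowest steps recorded in a .ninja_log file.
import sys


def slowest_steps(log_path='.ninja_log', top=20):
  durations = {}  # output path -> elapsed milliseconds
  with open(log_path) as log:
    for line in log:
      if line.startswith('#'):  # e.g. the '# ninja log v5' header
        continue
      start_ms, end_ms, _mtime, output, _cmd_hash = (
          line.rstrip('\n').split('\t'))
      # A step that reran appends a newer entry; the last one wins.
      durations[output] = int(end_ms) - int(start_ms)
  for output, ms in sorted(durations.items(), key=lambda kv: -kv[1])[:top]:
    print('{:8.2f}s  {}'.format(ms / 1000.0, output))


if __name__ == '__main__':
  slowest_steps(sys.argv[1] if len(sys.argv) > 1 else '.ninja_log')
```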
@@ -16,4 +42,7 @@ Some tips for debugging slow build times: builds: * Use `ninja -n -d explain` to figure out why ninja thinks a target is dirty. * Ensure actions are taking advantage of ninja's `restat=1` feature by not - updating timestamps on outputs when their content does not change. + updating timestamps on outputs when their contents do not change. + * E.g. by using [`build_utils.AtomicOutput()`] + +[`build_utils.AtomicOutput()`]: https://source.chromium.org/search?q=symbol:AtomicOutput%20f:build diff --git a/build/docs/writing_gn_templates.md b/build/docs/writing_gn_templates.md index 21a73bb65ec2..9171265ec1dd 100644 --- a/build/docs/writing_gn_templates.md +++ b/build/docs/writing_gn_templates.md @@ -1,6 +1,6 @@ # Writing GN Templates GN and Ninja are documented here: -* GN: https://gn.googlesource.com/gn/+/master/docs/ +* GN: https://gn.googlesource.com/gn/+/main/docs/ * Ninja: https://ninja-build.org/manual.html [TOC] @@ -40,6 +40,9 @@ won't exist for the initial build. depfiles. * Stale paths in depfiles can cause ninja to complain of circular dependencies [in some cases](https://bugs.chromium.org/p/chromium/issues/detail?id=639042). + * Use [`action_helpers.write_depfile()`] to write these. + +[`action_helpers.write_depfile()`]: https://source.chromium.org/chromium/chromium/src/+/main:build/action_helpers.py?q=symbol:%5Cbwrite_depfile ### Ensuring "gn analyze" Knows About your Inputs "gn analyze" is used by bots to run only affected tests and build only affected @@ -101,7 +104,7 @@ Rationale: * use `$target_out_dir/$target_name.$EXTENSION`. **Option 3:** For outputs that are required at runtime -(e.g. [runtime_deps](https://gn.googlesource.com/gn/+/master/docs/reference.md#runtime_deps)), +(e.g. [runtime_deps](https://gn.googlesource.com/gn/+/main/docs/reference.md#runtime_deps)), options 1 & 2 do not work because they are not archived in builder/tester bot configurations. In this case: * use `$root_out_dir/gen.runtime` or `$root_out_dir/obj.runtime`. @@ -136,19 +139,23 @@ Outputs should be atomic and take advantage of `restat=1`. short-circuits a build when output timestamps do not change. This feature is the reason that the total number of build steps sometimes decreases when building.. -* Use [`build_utils.AtomicOutput()`](https://cs.chromium.org/chromium/src/build/android/gyp/util/build_utils.py?rcl=7d6ba28e92bec865a7b7876c35b4621d56fb37d8&l=128) - to perform both of these techniques. +* Use [`action_helpers.atomic_output()`] to perform both of these techniques. + +[`action_helpers.atomic_output()`]: https://source.chromium.org/chromium/chromium/src/+/main:build/action_helpers.py?q=symbol:%5Cbatomic_output Actions should be deterministic in order to avoid hard-to-reproduce bugs. Given identical inputs, they should produce byte-for-byte identical outputs. * Some common mistakes: * Depending on filesystem iteration order. - * Writing timestamps in files (or in zip entries). * Writing absolute paths in outputs. + * Writing timestamps in files (or in zip entries). + * Tip: Use [`zip_helpers.py`] when writing `.zip` files. + +[`zip_helpers.py`]: https://source.chromium.org/chromium/chromium/src/+/main:build/zip_helpers.py ## Style Guide Chromium GN files follow -[GN's Style Guide](https://gn.googlesource.com/gn/+/master/docs/style_guide.md) +[GN's Style Guide](https://gn.googlesource.com/gn/+/main/docs/style_guide.md) with a few additions. ### Action Granularity @@ -211,7 +218,7 @@ only be applied to the final target (the one named `target_name`). 
Applying only to the final target ensures that the invoker-provided visibility does not prevent intermediate targets from depending on each other. -[visibility]: https://gn.googlesource.com/gn/+/master/docs/reference.md#var_visibility +[visibility]: https://gn.googlesource.com/gn/+/main/docs/reference.md#var_visibility Example: ```python @@ -316,7 +323,7 @@ prevented from depending on the other ones). Example: ```python template("template_with_multiple_targets") { - action("${target_name}__helper) { + action("${target_name}__helper") { forward_variables_from(invoker, [ "testonly" ]) ... } @@ -331,7 +338,7 @@ An alternative would be to explicitly set `visibility` on all inner targets, but doing so tends to be tedious and has little benefit. [this bug]: https://bugs.chromium.org/p/chromium/issues/detail?id=862232 -[forward_variables_from]: https://gn.googlesource.com/gn/+/master/docs/reference.md#func_forward_variables_from +[forward_variables_from]: https://gn.googlesource.com/gn/+/main/docs/reference.md#func_forward_variables_from ## Useful Ninja Flags Useful ninja flags when developing build rules: diff --git a/build/dotfile_settings.gni b/build/dotfile_settings.gni index 3d869b37384c..50c04a8c0caa 100644 --- a/build/dotfile_settings.gni +++ b/build/dotfile_settings.gni @@ -1,4 +1,4 @@ -# Copyright (c) 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,8 +7,6 @@ build_dotfile_settings = { exec_script_whitelist = [ - "//build/config/android/config.gni", - "//build/config/android/internal_rules.gni", "//build/config/android/rules.gni", "//build/config/chromeos/rules.gni", "//build/config/compiler/BUILD.gn", @@ -25,10 +23,11 @@ build_dotfile_settings = { "//build/config/mac/mac_sdk.gni", "//build/config/mac/rules.gni", "//build/config/posix/BUILD.gn", - "//build/config/python.gni", + "//build/config/rust.gni", "//build/config/sysroot.gni", "//build/config/win/BUILD.gn", "//build/config/win/visual_studio_version.gni", + "//build/rust/analyze.gni", "//build/timestamp.gni", "//build/toolchain/apple/toolchain.gni", "//build/toolchain/BUILD.gn", @@ -37,7 +36,8 @@ build_dotfile_settings = { "//build/toolchain/nacl/BUILD.gn", "//build/toolchain/toolchain.gni", "//build/toolchain/win/BUILD.gn", + "//build/toolchain/win/win_toolchain_data.gni", + "//build/toolchain/zos/BUILD.gn", "//build/util/branding.gni", - "//build/util/version.gni", ] } diff --git a/build/download_nacl_toolchains.py b/build/download_nacl_toolchains.py index 286a92a27ee9..1b86a4bb9e19 100755 --- a/build/download_nacl_toolchains.py +++ b/build/download_nacl_toolchains.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Shim to run nacl toolchain download script only if there is a nacl dir.""" -from __future__ import print_function import os import shutil diff --git a/build/env_dump.py b/build/env_dump.py index 3f8217398c44..1eaf8dc92125 100755 --- a/build/env_dump.py +++ b/build/env_dump.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2013 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
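The `action_helpers` references in the writing_gn_templates.md hunks above are easier to picture with a sketch. This is a hedged illustration, not Chromium code: it assumes `build/action_helpers.py` exposes `atomic_output()` as a context manager and `write_depfile(depfile_path, first_output, inputs)` roughly as the linked docs suggest; check the real module for the exact signatures.

```python
import sys

sys.path.insert(0, 'build')  # assumption: invoked from the source root
import action_helpers


def concatenate(output_path, depfile_path, input_paths):
  # atomic_output() stages the write in a temp file and only replaces
  # output_path when the contents actually change, which cooperates with
  # ninja's restat=1 short-circuiting.
  with action_helpers.atomic_output(output_path) as out:
    for path in input_paths:
      with open(path, 'rb') as f:
        out.write(f.read())
  # Record the inputs so ninja reruns this action when any of them changes.
  action_helpers.write_depfile(depfile_path, output_path, input_paths)
```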
diff --git a/build/extract_from_cab.py b/build/extract_from_cab.py index 9695b96789b7..c7ae6d9f499f 100755 --- a/build/extract_from_cab.py +++ b/build/extract_from_cab.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Extracts a single file from a CAB archive.""" -from __future__ import print_function import os import shutil diff --git a/build/extract_partition.py b/build/extract_partition.py index 4b2f06400bc1..319ce8fc7f45 100755 --- a/build/extract_partition.py +++ b/build/extract_partition.py @@ -1,12 +1,129 @@ -#!/usr/bin/env python -# Copyright 2019 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Extracts an LLD partition from an ELF file.""" import argparse +import hashlib +import math +import os +import struct import subprocess import sys +import tempfile + + +def _ComputeNewBuildId(old_build_id, file_path): + """ + Computes the new build-id from old build-id and file_path. + + Args: + old_build_id: Original build-id in bytearray. + file_path: Path to output ELF file. + + Returns: + New build id with the same length as |old_build_id|. + """ + m = hashlib.sha256() + m.update(old_build_id) + m.update(os.path.basename(file_path).encode('utf-8')) + hash_bytes = m.digest() + # In case build_id is longer than hash computed, repeat the hash + # to the desired length first. + id_size = len(old_build_id) + hash_size = len(hash_bytes) + return (hash_bytes * (id_size // hash_size + 1))[:id_size] + + +def _ExtractPartition(objcopy, input_elf, output_elf, partition): + """ + Extracts a partition from an ELF file. + + For partitions other than main partition, we need to rewrite + the .note.gnu.build-id section so that the build-id remains + unique. + + Note: + - `objcopy` does not modify build-id when partitioning the + combined ELF file by default. + - The new build-id is calculated as hash of original build-id + and partitioned ELF file name. + + Args: + objcopy: Path to objcopy binary. + input_elf: Path to input ELF file. + output_elf: Path to output ELF file. + partition: Partition to extract from combined ELF file. None when + extracting main partition. + """ + if not partition: # main partition + # We do not overwrite build-id on main partition to allow the expected + # partition build ids to be synthesized given a libchrome.so binary, + # if necessary. + subprocess.check_call( + [objcopy, '--extract-main-partition', input_elf, output_elf]) + return + + # partitioned libs + build_id_section = '.note.gnu.build-id' + + with tempfile.TemporaryDirectory() as tempdir: + temp_elf = os.path.join(tempdir, 'obj_without_id.so') + old_build_id_file = os.path.join(tempdir, 'old_build_id') + new_build_id_file = os.path.join(tempdir, 'new_build_id') + + # Dump out build-id section and remove original build-id section from + # ELF file. + subprocess.check_call([ + objcopy, + '--extract-partition', + partition, + # Note: Not using '--update-section' here as it is not supported + # by llvm-objcopy. 
+ '--remove-section', + build_id_section, + '--dump-section', + '{}={}'.format(build_id_section, old_build_id_file), + input_elf, + temp_elf, + ]) + + with open(old_build_id_file, 'rb') as f: + note_content = f.read() + + # .note section has following format according to + # typedef struct { + # unsigned char namesz[4]; /* Size of entry's owner string */ + # unsigned char descsz[4]; /* Size of the note descriptor */ + # unsigned char type[4]; /* Interpretation of the descriptor */ + # char name[1]; /* Start of the name+desc data */ + # } Elf_External_Note; + # `build-id` rewrite is only required on Android platform, + # where we have partitioned lib. + # Android platform uses little-endian. + # <: little-endian + # 4x: Skip 4 bytes + # L: unsigned long, 4 bytes + descsz, = struct.Struct('<4xL').unpack_from(note_content) + prefix = note_content[:-descsz] + build_id = note_content[-descsz:] + + with open(new_build_id_file, 'wb') as f: + f.write(prefix + _ComputeNewBuildId(build_id, output_elf)) + + # Write back the new build-id section. + subprocess.check_call([ + objcopy, + '--add-section', + '{}={}'.format(build_id_section, new_build_id_file), + # Add alloc section flag, or else the section will be removed by + # objcopy --strip-all when generating unstripped lib file. + '--set-section-flags', + '{}={}'.format(build_id_section, 'alloc'), + temp_elf, + output_elf, + ]) def main(): @@ -30,31 +147,29 @@ def main(): required=True, help='Stripped output file', metavar='FILE') - parser.add_argument('--dwp', help='Path to dwp binary', metavar='FILE') + parser.add_argument('--split-dwarf', action='store_true') parser.add_argument('input', help='Input file') args = parser.parse_args() - objcopy_args = [args.objcopy] - if args.partition: - objcopy_args += ['--extract-partition', args.partition] - else: - objcopy_args += ['--extract-main-partition'] - objcopy_args += [args.input, args.unstripped_output] - subprocess.check_call(objcopy_args) - - objcopy_args = [ - args.objcopy, '--strip-all', args.unstripped_output, args.stripped_output - ] - subprocess.check_call(objcopy_args) - - if args.dwp: - dwp_args = [ - args.dwp, '-e', args.unstripped_output, '-o', - args.unstripped_output + '.dwp' - ] - # Suppress output here because it doesn't seem to be useful. The most - # common error is a segfault, which will happen if files are missing. - subprocess.check_output(dwp_args, stderr=subprocess.STDOUT) + _ExtractPartition(args.objcopy, args.input, args.unstripped_output, + args.partition) + subprocess.check_call([ + args.objcopy, + '--strip-all', + args.unstripped_output, + args.stripped_output, + ]) + + # Debug info for partitions is the same as for the main library, so just + # symlink the .dwp files. + if args.split_dwarf: + dest = args.unstripped_output + '.dwp' + try: + os.unlink(dest) + except OSError: + pass + relpath = os.path.relpath(args.input + '.dwp', os.path.dirname(dest)) + os.symlink(relpath, dest) if __name__ == '__main__': diff --git a/build/find_depot_tools.py b/build/find_depot_tools.py index 49a9138ec31c..f891a414837a 100755 --- a/build/find_depot_tools.py +++ b/build/find_depot_tools.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright (c) 2011 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2011 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Small utility function to find depot_tools and add it to the python path. @@ -11,7 +11,6 @@ directory location. 
""" -from __future__ import print_function import os import sys diff --git a/build/fix_gn_headers.py b/build/fix_gn_headers.py index 71fb332bfd18..5111b5db4d4c 100755 --- a/build/fix_gn_headers.py +++ b/build/fix_gn_headers.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2017 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -10,7 +10,6 @@ Manual cleaning up is likely required afterwards. """ -from __future__ import print_function import argparse import os diff --git a/build/fuchsia/COMMON_METADATA b/build/fuchsia/COMMON_METADATA new file mode 100644 index 000000000000..f7f8861f038a --- /dev/null +++ b/build/fuchsia/COMMON_METADATA @@ -0,0 +1,5 @@ +monorail { + component: "Fuchsia" +} +team_email: "fuchsia-dev@chromium.org" +os: FUCHSIA diff --git a/build/fuchsia/DIR_METADATA b/build/fuchsia/DIR_METADATA index fe8198aeaa36..210aa6a954b8 100644 --- a/build/fuchsia/DIR_METADATA +++ b/build/fuchsia/DIR_METADATA @@ -1,5 +1 @@ -monorail { - component: "Fuchsia" -} - -team_email: "cr-fuchsia@chromium.org" +mixins: "//build/fuchsia/COMMON_METADATA" diff --git a/build/fuchsia/OWNERS b/build/fuchsia/OWNERS index 3dcaf8a38824..887630d46eba 100644 --- a/build/fuchsia/OWNERS +++ b/build/fuchsia/OWNERS @@ -1,8 +1,18 @@ +# When picking a reviewer for Fuchsia-related OWNERShip, please start by looking +# at git history to find the most relevant owner. The team would appreciate it +# if you would also add chromium-fuchsia-reviews@google.com so that a shadowed +# reviewer is added automatically. Thank you. + ddorwin@chromium.org -fdegans@chromium.org -kmarshall@chromium.org +grt@chromium.org sergeyu@chromium.org wez@chromium.org -per-file linux.sdk.sha1=chromium-autoroll@skia-public.iam.gserviceaccount.com -per-file mac.sdk.sha1=chromium-autoroll@skia-public.iam.gserviceaccount.com +per-file *.py=chonggu@google.com +per-file *.py=rohpavone@chromium.org +per-file *.py=zijiehe@google.com + +per-file linux_internal.sdk.sha1=chromium-internal-autoroll@skia-corp.google.com.iam.gserviceaccount.com + +per-file SECURITY_OWNERS=set noparent +per-file SECURITY_OWNERS=file://build/fuchsia/SECURITY_OWNERS diff --git a/build/fuchsia/PRESUBMIT.py b/build/fuchsia/PRESUBMIT.py new file mode 100644 index 000000000000..f42f4c2309a5 --- /dev/null +++ b/build/fuchsia/PRESUBMIT.py @@ -0,0 +1,47 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Presubmit script for Fuchsia. + +See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for +details on the presubmit API built into depot_tools. 
+""" + +USE_PYTHON3 = True + +import os + + +def CommonChecks(input_api, output_api): + build_fuchsia_dir = input_api.PresubmitLocalPath() + + def J(*dirs): + """Returns a path relative to presubmit directory.""" + return input_api.os_path.join(build_fuchsia_dir, *dirs) + + tests = [] + unit_tests = [ + J('binary_sizes_test.py'), + J('binary_size_differ_test.py'), + J('gcs_download_test.py'), + J('update_images_test.py'), + J('update_product_bundles_test.py'), + J('update_sdk_test.py'), + ] + + tests.extend( + input_api.canned_checks.GetUnitTests(input_api, + output_api, + unit_tests=unit_tests, + run_on_python2=False, + run_on_python3=True, + skip_shebang_check=True)) + return input_api.RunTests(tests) + + +def CheckChangeOnUpload(input_api, output_api): + return CommonChecks(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return CommonChecks(input_api, output_api) diff --git a/build/fuchsia/SECURITY_OWNERS b/build/fuchsia/SECURITY_OWNERS new file mode 100644 index 000000000000..17e8b7c653a4 --- /dev/null +++ b/build/fuchsia/SECURITY_OWNERS @@ -0,0 +1,16 @@ +# Changes to integration with the Fuchsia platform, or peer components, require +# security review to avoid introducing sandbox escapes. These include: +# - Critical platform integrations (e.g. shared memory, process launching). +# - Changes to Chromium-defined Fuchsia IPC (aka FIDL) protocols. +# - Addition of new FIDL services to child process sandboxes. +# - Addition of new FIDL clients and implementations. +# +# Security team: If you are uncomfortable reviewing a particular bit of code +# yourself, don't hesitate to seek help from another security team member! +# Nobody knows everything, and the only way to learn is from experience. + +# Please keep reviewers ordered alphabetically by LDAP. +ajgo@chromium.org +rsesek@chromium.org +tsepez@chromium.org +wez@chromium.org diff --git a/build/fuchsia/aemu_target.py b/build/fuchsia/aemu_target.py deleted file mode 100644 index 249fbf029fda..000000000000 --- a/build/fuchsia/aemu_target.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Implements commands for running and interacting with Fuchsia on AEMU.""" - -import emu_target -import os -import platform -import qemu_target -import logging - -from common import GetEmuRootForPlatform - - -def GetTargetType(): - return AemuTarget - - -class AemuTarget(qemu_target.QemuTarget): - EMULATOR_NAME = 'aemu' - - def __init__(self, - out_dir, - target_cpu, - system_log_file, - cpu_cores, - require_kvm, - ram_size_mb, - enable_graphics, - hardware_gpu, - fuchsia_out_dir=None): - super(AemuTarget, - self).__init__(out_dir, target_cpu, system_log_file, cpu_cores, - require_kvm, ram_size_mb, fuchsia_out_dir) - - self._enable_graphics = enable_graphics - self._hardware_gpu = hardware_gpu - - @staticmethod - def CreateFromArgs(args): - return AemuTarget(args.out_dir, args.target_cpu, args.system_log_file, - args.cpu_cores, args.require_kvm, args.ram_size_mb, - args.enable_graphics, args.hardware_gpu, - args.fuchsia_out_dir) - - @staticmethod - def RegisterArgs(arg_parser): - aemu_args = arg_parser.add_argument_group('aemu', 'AEMU arguments') - aemu_args.add_argument('--enable-graphics', - action='store_true', - default=False, - help='Start AEMU with graphics instead of '\ - 'headless.') - aemu_args.add_argument('--hardware-gpu', - action='store_true', - default=False, - help='Use local GPU hardware instead of '\ - 'Swiftshader.') - - def _EnsureEmulatorExists(self, path): - assert os.path.exists(path), \ - 'This checkout is missing %s.' % (self.EMULATOR_NAME) - - def _BuildCommand(self): - aemu_folder = GetEmuRootForPlatform(self.EMULATOR_NAME) - - self._EnsureEmulatorExists(aemu_folder) - aemu_path = os.path.join(aemu_folder, 'emulator') - - # `VirtioInput` is needed for touch input device support on Fuchsia. - # `RefCountPipe` is needed for proper cleanup of resources when a process - # that uses Vulkan dies inside the guest - aemu_features = 'VirtioInput,RefCountPipe' - - # Configure the CPU to emulate. - # On Linux, we can enable lightweight virtualization (KVM) if the host and - # guest architectures are the same. 
- if self._IsKvmEnabled(): - aemu_features += ',KVM,GLDirectMem,Vulkan' - else: - if self._target_cpu != 'arm64': - aemu_features += ',-GLDirectMem' - - # Use Swiftshader for Vulkan if requested - gpu_target = 'swiftshader_indirect' - if self._hardware_gpu: - gpu_target = 'host' - - aemu_command = [aemu_path] - if not self._enable_graphics: - aemu_command.append('-no-window') - # All args after -fuchsia flag gets passed to QEMU - aemu_command.extend([ - '-feature', aemu_features, '-window-size', '1024x600', '-gpu', - gpu_target, '-verbose', '-fuchsia' - ]) - - aemu_command.extend(self._BuildQemuConfig()) - - aemu_command.extend([ - '-vga', 'none', - '-device', 'virtio-keyboard-pci', - '-device', 'virtio_input_multi_touch_pci_1', - '-device', 'ich9-ahci,id=ahci']) - if platform.machine() == 'x86_64': - aemu_command.extend(['-device', 'isa-debug-exit,iobase=0xf4,iosize=0x04']) - - logging.info(' '.join(aemu_command)) - return aemu_command - - def _GetVulkanIcdFile(self): - return os.path.join(GetEmuRootForPlatform(self.EMULATOR_NAME), 'lib64', - 'vulkan', 'vk_swiftshader_icd.json') - - def _SetEnv(self): - env = os.environ.copy() - aemu_logging_env = { - "ANDROID_EMU_VK_NO_CLEANUP": "1", - "ANDROID_EMUGL_LOG_PRINT": "1", - "ANDROID_EMUGL_VERBOSE": "1", - "VK_ICD_FILENAMES": self._GetVulkanIcdFile(), - "VK_LOADER_DEBUG": "info,error", - } - env.update(aemu_logging_env) - return env diff --git a/build/fuchsia/amber_repo.py b/build/fuchsia/amber_repo.py deleted file mode 100644 index 3b057fdbf064..000000000000 --- a/build/fuchsia/amber_repo.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import common -import json -import logging -import os -import shutil -import subprocess -import tempfile -import time - -from six.moves import urllib - - -# Maximum amount of time to block while waiting for "pm serve" to come up. -_PM_SERVE_LIVENESS_TIMEOUT_SECS = 10 - -_MANAGED_REPO_NAME = 'chrome_runner' - - -class AmberRepo(object): - """Abstract interface for a repository used to serve packages to devices.""" - - def __init__(self, target): - self._target = target - - def PublishPackage(self, package_path): - pm_tool = common.GetHostToolPathFromPlatform('pm') - subprocess.check_call( - [pm_tool, 'publish', '-a', '-f', package_path, '-r', self.GetPath(), - '-vt', '-v'], - stderr=subprocess.STDOUT) - - def GetPath(self): - pass - - -class ManagedAmberRepo(AmberRepo): - """Creates and serves packages from an ephemeral repository.""" - - def __init__(self, target): - AmberRepo.__init__(self, target) - self._with_count = 0 - - self._amber_root = tempfile.mkdtemp() - pm_tool = common.GetHostToolPathFromPlatform('pm') - subprocess.check_call([pm_tool, 'newrepo', '-repo', self._amber_root]) - logging.info('Creating and serving temporary Amber root: {}.'.format( - self._amber_root)) - - serve_port = common.GetAvailableTcpPort() - self._pm_serve_task = subprocess.Popen( - [pm_tool, 'serve', '-d', os.path.join(self._amber_root, 'repository'), - '-l', ':%d' % serve_port, '-q']) - - # Block until "pm serve" starts serving HTTP traffic at |serve_port|. 
- timeout = time.time() + _PM_SERVE_LIVENESS_TIMEOUT_SECS - while True: - try: - urllib.request.urlopen('http://localhost:%d' % serve_port, - timeout=1).read() - break - except urllib.error.URLError: - logging.info('Waiting until \'pm serve\' is up...') - - if time.time() >= timeout: - raise Exception('Timed out while waiting for \'pm serve\'.') - - time.sleep(1) - - remote_port = common.ConnectPortForwardingTask(target, serve_port, 0) - self._RegisterAmberRepository(self._amber_root, remote_port) - - def __enter__(self): - self._with_count += 1 - return self - - def __exit__(self, type, value, tb): - """Allows the repository to delete itself when it leaves the scope of a - 'with' block.""" - self._with_count -= 1 - if self._with_count > 0: - return - - logging.info('Cleaning up Amber root: ' + self._amber_root) - shutil.rmtree(self._amber_root) - self._amber_root = None - - self._UnregisterAmberRepository() - self._pm_serve_task.kill() - self._pm_serve_task = None - - def GetPath(self): - return self._amber_root - - def _RegisterAmberRepository(self, tuf_repo, remote_port): - """Configures a device to use a local TUF repository as an installation - source for packages. - |tuf_repo|: The host filesystem path to the TUF repository. - |remote_port|: The reverse-forwarded port used to connect to instance of - `pm serve` that is serving the contents of |tuf_repo|.""" - - # Extract the public signing key for inclusion in the config file. - root_keys = [] - root_json_path = os.path.join(tuf_repo, 'repository', 'root.json') - root_json = json.load(open(root_json_path, 'r')) - for root_key_id in root_json['signed']['roles']['root']['keyids']: - root_keys.append({ - 'Type': root_json['signed']['keys'][root_key_id]['keytype'], - 'Value': root_json['signed']['keys'][root_key_id]['keyval']['public'] - }) - - # "pm serve" can automatically generate a "config.json" file at query time, - # but the file is unusable because it specifies URLs with port - # numbers that are unreachable from across the port forwarding boundary. - # So instead, we generate our own config file with the forwarded port - # numbers instead. - config_file = open(os.path.join(tuf_repo, 'repository', 'repo_config.json'), - 'w') - json.dump({ - 'ID': _MANAGED_REPO_NAME, - 'RepoURL': "http://127.0.0.1:%d" % remote_port, - 'BlobRepoURL': "http://127.0.0.1:%d/blobs" % remote_port, - 'RatePeriod': 10, - 'RootKeys': root_keys, - 'StatusConfig': { - 'Enabled': True - }, - 'Auto': True - }, config_file) - config_file.close() - - # Register the repo. - return_code = self._target.RunCommand( - [('amberctl rm_src -n %s; ' + - 'amberctl add_src -f http://127.0.0.1:%d/repo_config.json') - % (_MANAGED_REPO_NAME, remote_port)]) - if return_code != 0: - raise Exception('Error code %d when running amberctl.' % return_code) - - - def _UnregisterAmberRepository(self): - """Unregisters the Amber repository.""" - - logging.debug('Unregistering Amber repository.') - self._target.RunCommand(['amberctl', 'rm_src', '-n', _MANAGED_REPO_NAME]) - - # Re-enable 'devhost' repo if it's present. This is useful for devices that - # were booted with 'fx serve'. - self._target.RunCommand(['amberctl', 'enable_src', '-n', 'devhost'], - silent=True) - - -class ExternalAmberRepo(AmberRepo): - """Publishes packages to an Amber repository located and served externally - (ie. 
located under a Fuchsia build directory and served by "fx serve"."""
-
-  def __init__(self, amber_root):
-    self._amber_root = amber_root
-    logging.info('Using existing Amber root: {}'.format(amber_root))
-    logging.info('Ensure that "fx serve" is running.')
-
-  def GetPath(self):
-    return self._amber_root
-
-  def __enter__(self):
-    return self
-
-  def __exit__(self, type, value, tb):
-    pass
diff --git a/build/fuchsia/binary_size_differ.py b/build/fuchsia/binary_size_differ.py
new file mode 100755
index 000000000000..190a1731cfdd
--- /dev/null
+++ b/build/fuchsia/binary_size_differ.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env vpython3
+#
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+'''Implements Chrome-Fuchsia package binary size differ.'''
+
+import argparse
+import json
+import os
+import sys
+import traceback
+
+from binary_sizes import ReadPackageSizesJson
+from binary_sizes import PACKAGES_SIZES_FILE
+
+# Eng is not responsible for changes that cause "reasonable growth" if the
+# uncompressed binary size does not grow.
+# First-warning will fail the test if the uncompressed and compressed size
+# grow, while always-fail will fail the test regardless of uncompressed growth
+# (solely based on compressed growth).
+_FIRST_WARNING_DELTA_BYTES = 12 * 1024  # 12 KiB
+_ALWAYS_FAIL_DELTA_BYTES = 100 * 1024  # 100 KiB
+_TRYBOT_DOC = 'https://chromium.googlesource.com/chromium/src/+/main/docs/speed/binary_size/fuchsia_binary_size_trybot.md'
+
+SIZE_FAILURE = 1
+ROLLER_SIZE_WARNING = 2
+SUCCESS = 0
+
+
+def ComputePackageDiffs(before_sizes_file, after_sizes_file, author=None):
+  '''Computes difference between after and before diff, for each package.'''
+  before_sizes = ReadPackageSizesJson(before_sizes_file)
+  after_sizes = ReadPackageSizesJson(after_sizes_file)
+
+  assert before_sizes.keys() == after_sizes.keys(), (
+      'Package files cannot'
+      ' be compared with different packages: '
+      '{} vs {}'.format(before_sizes.keys(), after_sizes.keys()))
+
+  growth = {'compressed': {}, 'uncompressed': {}}
+  status_code = SUCCESS
+  summary = ''
+  for package_name in before_sizes:
+    growth['compressed'][package_name] = (after_sizes[package_name].compressed -
+                                          before_sizes[package_name].compressed)
+    growth['uncompressed'][package_name] = (
+        after_sizes[package_name].uncompressed -
+        before_sizes[package_name].uncompressed)
+    # Developers are only responsible if uncompressed increases.
+    if ((growth['compressed'][package_name] >= _FIRST_WARNING_DELTA_BYTES
+         and growth['uncompressed'][package_name] > 0)
+        # However, if compressed growth is unusually large, fail always.
+        or growth['compressed'][package_name] >= _ALWAYS_FAIL_DELTA_BYTES):
+      if not summary:
+        summary = ('Size check failed! The following package(s) are affected:'
+                   '<br>')
+      status_code = SIZE_FAILURE
+      summary += (('- {} (compressed) grew by {} bytes (uncompressed growth:'
+                   ' {} bytes).<br>').format(
+                       package_name, growth['compressed'][package_name],
+                       growth['uncompressed'][package_name]))
+      summary += ('Note that this bot compares growth against trunk, and is '
+                  'not aware of CL chaining.<br>')
+
+  # Allow rollers to pass even with size increases. See crbug.com/1355914.
+  if author and '-autoroll' in author and status_code == SIZE_FAILURE:
+    summary = summary.replace('Size check failed! ', '')
+    summary = (
+        'The following growth by an autoroller will be ignored:<br><br>' +
+        summary)
+    status_code = ROLLER_SIZE_WARNING
+  growth['status_code'] = status_code
+  summary += ('<br>See the following document for more information about'
+              ' this trybot:<br>{}'.format(_TRYBOT_DOC))
+  growth['summary'] = summary
+
+  # TODO(crbug.com/1266085): Investigate using these fields.
+  growth['archive_filenames'] = []
+  growth['links'] = []
+  return growth
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--before-dir',
+      type=os.path.realpath,
+      required=True,
+      help='Location of the build without the patch',
+  )
+  parser.add_argument(
+      '--after-dir',
+      type=os.path.realpath,
+      required=True,
+      help='Location of the build with the patch',
+  )
+  parser.add_argument('--author', help='Author of change')
+  parser.add_argument(
+      '--results-path',
+      type=os.path.realpath,
+      required=True,
+      help='Output path for the trybot result .json file',
+  )
+  parser.add_argument('--verbose',
+                      '-v',
+                      action='store_true',
+                      help='Enable verbose output')
+  args = parser.parse_args()
+
+  if args.verbose:
+    print('Fuchsia binary sizes')
+    print('Working directory', os.getcwd())
+    print('Args:')
+    for var in vars(args):
+      print('  {}: {}'.format(var, getattr(args, var) or ''))
+
+  if not os.path.isdir(args.before_dir) or not os.path.isdir(args.after_dir):
+    raise Exception(
+        'Could not find build output directory "{}" or "{}".'.format(
+            args.before_dir, args.after_dir))
+
+  test_name = 'sizes'
+  before_sizes_file = os.path.join(args.before_dir, test_name,
+                                   PACKAGES_SIZES_FILE)
+  after_sizes_file = os.path.join(args.after_dir, test_name,
+                                  PACKAGES_SIZES_FILE)
+  if not os.path.isfile(before_sizes_file):
+    raise Exception(
+        'Could not find before sizes file: "{}"'.format(before_sizes_file))
+
+  if not os.path.isfile(after_sizes_file):
+    raise Exception(
+        'Could not find after sizes file: "{}"'.format(after_sizes_file))
+
+  test_completed = False
+  try:
+    growth = ComputePackageDiffs(before_sizes_file,
+                                 after_sizes_file,
+                                 author=args.author)
+    test_completed = True
+    with open(args.results_path, 'wt') as results_file:
+      json.dump(growth, results_file)
+  except:
+    _, value, trace = sys.exc_info()
+    traceback.print_tb(trace)
+    print(str(value))
+  finally:
+    return 0 if test_completed else 1
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/fuchsia/binary_size_differ_test.py b/build/fuchsia/binary_size_differ_test.py
new file mode 100755
index 000000000000..6192bf2d6539
--- /dev/null
+++ b/build/fuchsia/binary_size_differ_test.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
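# Editorial sketch of the pass/fail rule from binary_size_differ.py above,
# which the tests below exercise. The helper name and sample figures are
# illustrative only; the 12 KiB and 100 KiB thresholds correspond to
# _FIRST_WARNING_DELTA_BYTES and _ALWAYS_FAIL_DELTA_BYTES.
def _package_fails_size_check(compressed_growth, uncompressed_growth):
  # Developers are only on the hook when the uncompressed size also grows.
  first_warning = (compressed_growth >= 12 * 1024
                   and uncompressed_growth > 0)
  # Unusually large compressed growth fails regardless.
  always_fail = compressed_growth >= 100 * 1024
  return first_warning or always_fail

assert not _package_fails_size_check(8 * 1024, 8 * 1024)   # below warning level
assert not _package_fails_size_check(16 * 1024, 0)         # uncompressed flat
assert _package_fails_size_check(16 * 1024, 16 * 1024)     # both grew
assert _package_fails_size_check(110 * 1024, -4 * 1024)    # past hard limit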
+ +import copy +import os +import tempfile +from typing import MutableMapping, Optional +import unittest + +import binary_size_differ +import binary_sizes + +_EXAMPLE_BLOBS_BEFORE = """ +{ + "web_engine": [ + { + "merkle": "77e876447dd2daaaab7048d646e87fe8b6d9fecef6cbfcc4af30b8fbfa50b881", + "path": "locales/ta.pak", + "bytes": 17916, + "is_counted": true, + "size": 16384 + }, + { + "merkle": "5f1932b8c9fe954f3c3fdb34ab2089d2af34e5a0cef90cad41a1cd37d92234bf", + "path": "lib/libEGL.so", + "bytes": 226960, + "is_counted": true, + "size": 90112 + }, + { + "merkle": "9822fc0dd95cdd1cc46b5c6632a928a6ad19b76ed0157397d82a2f908946fc34", + "path": "meta.far", + "bytes": 24576, + "is_counted": false, + "size": 16384 + }, + { + "merkle": "090aed4593c4f7d04a3ad80e9971c0532dd5b1d2bdf4754202cde510a88fd220", + "path": "locales/ru.pak", + "bytes": 11903, + "is_counted": true, + "size": 16384 + } + ] +} +""" + + +class BinarySizeDifferTest(unittest.TestCase): + def ChangePackageSize( + self, + packages: MutableMapping[str, binary_sizes.PackageSizes], + name: str, + compressed_increase: int, + uncompressed_increase: Optional[int] = None): + if uncompressed_increase is None: + uncompressed_increase = compressed_increase + original_package = packages[name] + new_package = binary_sizes.PackageSizes( + compressed=original_package.compressed + compressed_increase, + uncompressed=original_package.uncompressed + uncompressed_increase) + packages[name] = new_package + + def testComputePackageDiffs(self): + # TODO(1309977): Disabled on Windows because Windows doesn't allow opening a + # NamedTemporaryFile by name. + if os.name == 'nt': + return + + SUCCESS = 0 + FAILURE = 1 + ROLLER_SIZE_WARNING = 2 + with tempfile.NamedTemporaryFile(mode='w') as before_file: + before_file.write(_EXAMPLE_BLOBS_BEFORE) + before_file.flush() + blobs = binary_sizes.ReadPackageBlobsJson(before_file.name) + sizes = binary_sizes.GetPackageSizes(blobs) + binary_sizes.WritePackageSizesJson(before_file.name, sizes) + + # No change. + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + before_file.name) + self.assertEqual(growth['status_code'], SUCCESS) + self.assertEqual(growth['compressed']['web_engine'], 0) + + after_file = tempfile.NamedTemporaryFile(mode='w', delete=True) + after_file.close() + try: + # Increase a blob, but below the limit. + other_sizes = copy.deepcopy(sizes) + self.ChangePackageSize(other_sizes, 'web_engine', 8 * 1024) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name) + self.assertEqual(growth['status_code'], SUCCESS) + self.assertEqual(growth['compressed']['web_engine'], 8 * 1024) + + # Increase beyond the limit (adds another 8k) + self.ChangePackageSize(other_sizes, 'web_engine', 8 * 1024 + 1) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name) + self.assertEqual(growth['status_code'], FAILURE) + self.assertEqual(growth['compressed']['web_engine'], 16 * 1024 + 1) + self.assertIn('check failed', growth['summary']) + self.assertIn(f'web_engine (compressed) grew by {16 * 1024 + 1} bytes', + growth['summary']) + + # Increase beyond the limit, but compressed does not increase. 
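# (Note on this and the following scenarios: as the calls show, it is the
# uncompressed delta that is varied, held flat here and made negative in the
# next case, while the compressed size keeps growing; the differ tolerates
# that because developers are only held responsible when the uncompressed
# size grows. Each scenario also rewrites the "before" file with the previous
# sizes first, so every comparison is relative to the preceding step rather
# than to the original baseline.)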
+ binary_sizes.WritePackageSizesJson(before_file.name, other_sizes) + self.ChangePackageSize(other_sizes, + 'web_engine', + 16 * 1024 + 1, + uncompressed_increase=0) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name) + self.assertEqual(growth['uncompressed']['web_engine'], SUCCESS) + self.assertEqual(growth['status_code'], SUCCESS) + self.assertEqual(growth['compressed']['web_engine'], 16 * 1024 + 1) + + # Increase beyond the limit, but compressed goes down. + binary_sizes.WritePackageSizesJson(before_file.name, other_sizes) + self.ChangePackageSize(other_sizes, + 'web_engine', + 16 * 1024 + 1, + uncompressed_increase=-4 * 1024) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name) + self.assertEqual(growth['status_code'], SUCCESS) + self.assertEqual(growth['compressed']['web_engine'], 16 * 1024 + 1) + + # Increase beyond the second limit. Fails, regardless of uncompressed. + binary_sizes.WritePackageSizesJson(before_file.name, other_sizes) + self.ChangePackageSize(other_sizes, + 'web_engine', + 100 * 1024 + 1, + uncompressed_increase=-4 * 1024) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name) + self.assertEqual(growth['status_code'], FAILURE) + self.assertEqual(growth['compressed']['web_engine'], 100 * 1024 + 1) + + # Increase beyond the second limit, but roller authored CL. + binary_sizes.WritePackageSizesJson(before_file.name, other_sizes) + self.ChangePackageSize(other_sizes, + 'web_engine', + 100 * 1024 + 1, + uncompressed_increase=-4 * 1024) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name, + author='big-autoroller') + self.assertEqual(growth['status_code'], ROLLER_SIZE_WARNING) + self.assertEqual(growth['compressed']['web_engine'], 100 * 1024 + 1) + self.assertNotIn('check failed', growth['summary']) + self.assertIn('growth by an autoroller will be ignored', + growth['summary']) + self.assertIn(f'web_engine (compressed) grew by {100 * 1024 + 1} bytes', + growth['summary']) + finally: + os.remove(after_file.name) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/fuchsia/binary_sizes.py b/build/fuchsia/binary_sizes.py index 52d05999d0b3..b1aa938c4f7d 100755 --- a/build/fuchsia/binary_sizes.py +++ b/build/fuchsia/binary_sizes.py @@ -1,18 +1,13 @@ -#!/usr/bin/env python2 +#!/usr/bin/env vpython3 # -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
'''Implements Chrome-Fuchsia package binary size checks.''' -from __future__ import division -from __future__ import print_function - import argparse import collections -import copy import json -import logging import math import os import re @@ -24,8 +19,13 @@ import traceback import uuid -from common import GetHostToolPathFromPlatform, GetHostArchFromPlatform -from common import SDK_ROOT, DIR_SOURCE_ROOT +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + 'test'))) + +from common import DIR_SRC_ROOT, SDK_ROOT, get_host_tool_path + +PACKAGES_BLOBS_FILE = 'package_blobs.json' +PACKAGES_SIZES_FILE = 'package_sizes.json' # Structure representing the compressed and uncompressed sizes for a Fuchsia # package. @@ -102,7 +102,9 @@ def CreateTestResults(test_status, timestamp): results = { 'tests': {}, 'interrupted': False, - 'path_delimiter': '.', + 'metadata': { + 'test_name_prefix': 'build/fuchsia/' + }, 'version': 3, 'seconds_since_epoch': timestamp, } @@ -182,6 +184,29 @@ def WriteGerritPluginSizeData(output_path, package_sizes): json.dump(sizes_data, sizes_file) +def ReadPackageBlobsJson(json_path): + """Reads package blob info from json file. + + Opens json file of blob info written by WritePackageBlobsJson, + and converts back into package blobs used in this script. + """ + with open(json_path, 'rt') as json_file: + formatted_blob_info = json.load(json_file) + + package_blobs = {} + for package in formatted_blob_info: + package_blobs[package] = {} + for blob_info in formatted_blob_info[package]: + blob = Blob(name=blob_info['path'], + hash=blob_info['merkle'], + uncompressed=blob_info['bytes'], + compressed=blob_info['size'], + is_counted=blob_info['is_counted']) + package_blobs[package][blob.name] = blob + + return package_blobs + + def WritePackageBlobsJson(json_path, package_blobs): """Writes package blob information in human-readable JSON format. @@ -200,8 +225,8 @@ def WritePackageBlobsJson(json_path, package_blobs): for blob_name in package_blobs[package]: blob = package_blobs[package][blob_name] blob_data.append({ - 'path': blob.name, - 'merkle': blob.hash, + 'path': str(blob.name), + 'merkle': str(blob.hash), 'bytes': blob.uncompressed, 'size': blob.compressed, 'is_counted': blob.is_counted @@ -212,10 +237,45 @@ def WritePackageBlobsJson(json_path, package_blobs): json.dump(formatted_blob_stats_per_package, json_file, indent=2) +def WritePackageSizesJson(json_path, package_sizes): + """Writes package sizes into a human-readable JSON format. + + JSON data is a dictionary of each package name being a key, with + the following keys within the sub-object: + 'compressed': compressed size of the package in bytes. + 'uncompressed': uncompressed size of the package in bytes. + """ + formatted_package_sizes = {} + for package, size_info in package_sizes.items(): + formatted_package_sizes[package] = { + 'uncompressed': size_info.uncompressed, + 'compressed': size_info.compressed + } + with (open(json_path, 'w')) as json_file: + json.dump(formatted_package_sizes, json_file, indent=2) + + +def ReadPackageSizesJson(json_path): + """Reads package_sizes from a given JSON file. + + Opens json file of blob info written by WritePackageSizesJson, + and converts back into package sizes used in this script. 
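
  For illustration only, the on-disk JSON produced by WritePackageSizesJson
  has this shape (the package name and numbers are made-up):

    {
      "web_engine": {
        "compressed": 40960,
        "uncompressed": 65536
      }
    }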
+ """ + with open(json_path, 'rt') as json_file: + formatted_package_info = json.load(json_file) + + package_sizes = {} + for package, size_info in formatted_package_info.items(): + package_sizes[package] = PackageSizes( + compressed=size_info['compressed'], + uncompressed=size_info['uncompressed']) + return package_sizes + + def GetCompressedSize(file_path): """Measures file size after blobfs compression.""" - compressor_path = GetHostToolPathFromPlatform('blobfs-compression') + compressor_path = get_host_tool_path('blobfs-compression') try: temp_dir = tempfile.mkdtemp() compressed_file_path = os.path.join(temp_dir, os.path.basename(file_path)) @@ -228,7 +288,7 @@ def GetCompressedSize(file_path): stdout=subprocess.PIPE, stderr=subprocess.STDOUT) proc.wait() - compressor_output = proc.stdout.read() + compressor_output = proc.stdout.read().decode('utf-8') if proc.returncode != 0: print(compressor_output, file=sys.stderr) raise Exception('Error while running %s' % compressor_path) @@ -253,7 +313,7 @@ def GetCompressedSize(file_path): def ExtractFarFile(file_path, extract_dir): """Extracts contents of a Fuchsia archive file to the specified directory.""" - far_tool = GetHostToolPathFromPlatform('far') + far_tool = get_host_tool_path('far') if not os.path.isfile(far_tool): raise Exception('Could not find FAR host tool "%s".' % far_tool) @@ -318,7 +378,7 @@ def GetPackageMerkleRoot(far_file_path): """Returns a package's Merkle digest.""" # The digest is the first word on the first line of the merkle tool's output. - merkle_tool = GetHostToolPathFromPlatform('merkleroot') + merkle_tool = get_host_tool_path('merkleroot') output = subprocess.check_output([merkle_tool, far_file_path]) return output.splitlines()[0].split()[0] @@ -407,7 +467,8 @@ def GetPackageSizes(package_blobs): blob_counts = collections.defaultdict(int) for package_name in package_blobs: for blob_name in package_blobs[package_name]: - blob_counts[blob_name] += 1 + blob = package_blobs[package_name][blob_name] + blob_counts[blob.hash] += 1 # Package sizes are the sum of blob sizes divided by their share counts. package_sizes = {} @@ -417,7 +478,7 @@ def GetPackageSizes(package_blobs): for blob_name in package_blobs[package_name]: blob = package_blobs[package_name][blob_name] if blob.is_counted: - count = blob_counts[blob_name] + count = blob_counts[blob.hash] compressed_total += blob.compressed // count uncompressed_total += blob.uncompressed // count package_sizes[package_name] = PackageSizes(compressed_total, @@ -469,8 +530,7 @@ def main(): ) parser.add_argument( '--sizes-path', - default=os.path.join('fuchsia', 'release', 'size_tests', - 'fyi_sizes.json'), + default=os.path.join('tools', 'fuchsia', 'size_tests', 'fyi_sizes.json'), help='path to package size limits json file. The path is relative to ' 'the workspace src directory') parser.add_argument('--verbose', @@ -494,7 +554,7 @@ def main(): raise Exception('Could not find build output directory "%s".' 
% args.build_out_dir) - with open(os.path.join(DIR_SOURCE_ROOT, args.sizes_path)) as sizes_file: + with open(os.path.join(DIR_SRC_ROOT, args.sizes_path)) as sizes_file: sizes_config = json.load(sizes_file) if args.verbose: @@ -540,7 +600,9 @@ def main(): with open(os.path.join(results_directory, 'perf_results.json'), 'w') as f: json.dump(sizes_histogram, f) WritePackageBlobsJson( - os.path.join(results_directory, 'package_blobs.json'), package_blobs) + os.path.join(results_directory, PACKAGES_BLOBS_FILE), package_blobs) + WritePackageSizesJson( + os.path.join(results_directory, PACKAGES_SIZES_FILE), package_sizes) if args.isolated_script_test_output: WriteTestResults(args.isolated_script_test_output, test_completed, diff --git a/build/fuchsia/binary_sizes_test.py b/build/fuchsia/binary_sizes_test.py index 962e4c912384..2f9dcf2177f5 100755 --- a/build/fuchsia/binary_sizes_test.py +++ b/build/fuchsia/binary_sizes_test.py @@ -1,20 +1,51 @@ -#!/usr/bin/env python -# Copyright 2020 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -import copy -import math +import json import os import shutil -import subprocess import tempfile -import time import unittest import binary_sizes -from common import DIR_SOURCE_ROOT + +_EXAMPLE_BLOBS = """ +{ + "web_engine": [ + { + "merkle": "77e876447dd2daaaab7048d646e87fe8b6d9fecef6cbfcc4af30b8fbfa50b881", + "path": "locales/ta.pak", + "bytes": 17916, + "is_counted": true, + "size": 16384 + }, + { + "merkle": "5f1932b8c9fe954f3c3fdb34ab2089d2af34e5a0cef90cad41a1cd37d92234bf", + "path": "lib/libEGL.so", + "bytes": 226960, + "is_counted": true, + "size": 90112 + }, + { + "merkle": "9822fc0dd95cdd1cc46b5c6632a928a6ad19b76ed0157397d82a2f908946fc34", + "path": "meta.far", + "bytes": 24576, + "is_counted": true, + "size": 16384 + }, + { + "merkle": "090aed4593c4f7d04a3ad80e9971c0532dd5b1d2bdf4754202cde510a88fd220", + "path": "locales/ru.pak", + "bytes": 11903, + "is_counted": true, + "size": 16384 + } + ] +} +""" class TestBinarySizes(unittest.TestCase): @@ -28,33 +59,73 @@ def setUpClass(cls): def tearDownClass(cls): shutil.rmtree(cls.tmpdir) - # TODO(crbug.com/1145648): Add tests covering FAR file input and histogram - # output. - - def testCommitFromBuildProperty(self): - commit_position = binary_sizes.CommitPositionFromBuildProperty( - 'refs/heads/master@{#819458}') - self.assertEqual(commit_position, 819458) - - def testCompressedSize(self): - """Verifies that the compressed file size can be extracted from the - blobfs-compression output.""" - - uncompressed_file = tempfile.NamedTemporaryFile(delete=False) - for line in range(200): - uncompressed_file.write( - 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. ' - 'Sed eleifend') - uncompressed_file.close() - compressed_path = uncompressed_file.name + '.compressed' - compressor_path = os.path.join(DIR_SOURCE_ROOT, 'third_party', - 'fuchsia-sdk', 'sdk', 'tools', 'x64', - 'blobfs-compression') - subprocess.call([compressor_path, uncompressed_file.name, compressed_path]) - self.assertEqual(binary_sizes.CompressedSize(uncompressed_file.name), - os.path.getsize(compressed_path)) - os.remove(uncompressed_file.name) - os.remove(compressed_path) + + def testReadAndWritePackageBlobs(self): + # TODO(1309977): Disabled on Windows because Windows doesn't allow opening a + # NamedTemporaryFile by name. 
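# (Background for the TODO: NamedTemporaryFile keeps the file open, and on
# Windows a file held open that way cannot be opened a second time by name,
# so the read-back below would fail; Unix platforms allow the second open.)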
+ if os.name == 'nt': + return + with tempfile.NamedTemporaryFile(mode='w') as tmp_file: + tmp_file.write(_EXAMPLE_BLOBS) + tmp_file.flush() + + package_blobs = binary_sizes.ReadPackageBlobsJson(tmp_file.name) + + tmp_package_file = tempfile.NamedTemporaryFile(mode='w', delete=False) + tmp_package_file.close() + try: + binary_sizes.WritePackageBlobsJson(tmp_package_file.name, package_blobs) + + self.assertEqual(binary_sizes.ReadPackageBlobsJson(tmp_package_file.name), + package_blobs) + finally: + os.remove(tmp_package_file.name) + + def testReadAndWritePackageSizes(self): + # TODO(1309977): Disabled on Windows because Windows doesn't allow opening a + # NamedTemporaryFile by name. + if os.name == 'nt': + return + with tempfile.NamedTemporaryFile(mode='w') as tmp_file: + tmp_file.write(_EXAMPLE_BLOBS) + tmp_file.flush() + blobs = binary_sizes.ReadPackageBlobsJson(tmp_file.name) + + sizes = binary_sizes.GetPackageSizes(blobs) + + new_sizes = {} + with tempfile.NamedTemporaryFile(mode='w') as tmp_file: + binary_sizes.WritePackageSizesJson(tmp_file.name, sizes) + new_sizes = binary_sizes.ReadPackageSizesJson(tmp_file.name) + self.assertEqual(new_sizes, sizes) + self.assertIn('web_engine', new_sizes) + + def testGetPackageSizesUsesBlobMerklesForCount(self): + # TODO(1309977): Disabled on Windows because Windows doesn't allow opening a + # NamedTemporaryFile by name. + if os.name == 'nt': + return + blobs = json.loads(_EXAMPLE_BLOBS) + + # Make a duplicate of the last blob. + last_blob = dict(blobs['web_engine'][-1]) + blobs['cast_runner'] = [] + last_blob['path'] = 'foo' # Give a non-sense name, but keep merkle. + + # If the merkle is the same, the blob_count increases by 1. + # This effectively reduces the size of the blobs size by half. + # In both packages, despite it appearing in both and under different + # names. + blobs['cast_runner'].append(last_blob) + + with tempfile.NamedTemporaryFile(mode='w') as tmp_file: + tmp_file.write(json.dumps(blobs)) + tmp_file.flush() + blobs = binary_sizes.ReadPackageBlobsJson(tmp_file.name) + + sizes = binary_sizes.GetPackageSizes(blobs) + + self.assertEqual(sizes['cast_runner'].compressed, last_blob['size'] / 2) if __name__ == '__main__': diff --git a/build/fuchsia/boot_data.py b/build/fuchsia/boot_data.py deleted file mode 100644 index bac60ec8c6cf..000000000000 --- a/build/fuchsia/boot_data.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Functions used to provision Fuchsia boot images.""" - -import common -import logging -import os -import subprocess -import tempfile -import time -import uuid - -_SSH_CONFIG_TEMPLATE = """ -Host * - CheckHostIP no - StrictHostKeyChecking no - ForwardAgent no - ForwardX11 no - UserKnownHostsFile {known_hosts} - User fuchsia - IdentitiesOnly yes - IdentityFile {identity} - ServerAliveInterval 2 - ServerAliveCountMax 5 - ControlMaster auto - ControlPersist 1m - ControlPath /tmp/ssh-%r@%h:%p - ConnectTimeout 5 - """ - -# Specifies boot files intended for use by an emulator. -TARGET_TYPE_QEMU = 'qemu' - -# Specifies boot files intended for use by anything (incl. physical devices). 
-TARGET_TYPE_GENERIC = 'generic' - -def _GetPubKeyPath(output_dir): - """Returns a path to the generated SSH public key.""" - - return os.path.join(output_dir, 'id_ed25519.pub') - - -def ProvisionSSH(output_dir): - """Generates a keypair and config file for SSH.""" - - host_key_path = os.path.join(output_dir, 'ssh_key') - host_pubkey_path = host_key_path + '.pub' - id_key_path = os.path.join(output_dir, 'id_ed25519') - id_pubkey_path = _GetPubKeyPath(output_dir) - known_hosts_path = os.path.join(output_dir, 'known_hosts') - ssh_config_path = os.path.join(output_dir, 'ssh_config') - - logging.debug('Generating SSH credentials.') - if not os.path.isfile(host_key_path): - subprocess.check_call(['ssh-keygen', '-t', 'ed25519', '-h', '-f', - host_key_path, '-P', '', '-N', ''], - stdout=open(os.devnull)) - if not os.path.isfile(id_key_path): - subprocess.check_call(['ssh-keygen', '-t', 'ed25519', '-f', id_key_path, - '-P', '', '-N', ''], stdout=open(os.devnull)) - - with open(ssh_config_path, "w") as ssh_config: - ssh_config.write( - _SSH_CONFIG_TEMPLATE.format(identity=id_key_path, - known_hosts=known_hosts_path)) - - if os.path.exists(known_hosts_path): - os.remove(known_hosts_path) - - -def GetTargetFile(filename, target_arch, target_type): - """Computes a path to |filename| in the Fuchsia boot image directory specific - to |target_type| and |target_arch|.""" - - assert target_type == TARGET_TYPE_QEMU or target_type == TARGET_TYPE_GENERIC - - return os.path.join(common.IMAGES_ROOT, target_arch, target_type, filename) - - -def GetSSHConfigPath(output_dir): - return output_dir + '/ssh_config' - - -def GetBootImage(output_dir, target_arch, target_type): - """"Gets a path to the Zircon boot image, with the SSH client public key - added.""" - - ProvisionSSH(output_dir) - pubkey_path = _GetPubKeyPath(output_dir) - zbi_tool = common.GetHostToolPathFromPlatform('zbi') - image_source_path = GetTargetFile('zircon-a.zbi', target_arch, target_type) - image_dest_path = os.path.join(output_dir, 'gen', 'fuchsia-with-keys.zbi') - - cmd = [ zbi_tool, '-o', image_dest_path, image_source_path, - '-e', 'data/ssh/authorized_keys=' + pubkey_path ] - subprocess.check_call(cmd) - - return image_dest_path - - -def GetKernelArgs(output_dir): - return ['devmgr.epoch=%d' % time.time()] - - -def AssertBootImagesExist(arch, platform): - assert os.path.exists(GetTargetFile('zircon-a.zbi', arch, platform)), \ - 'This checkout is missing the files necessary for\n' \ - 'booting this configuration of Fuchsia.\n' \ - 'To check out the files, add this entry to the "custom_vars"\n' \ - 'section of your .gclient file:\n\n' \ - ' "checkout_fuchsia_boot_images": "%s.%s"\n\n' % \ - (platform, arch) diff --git a/build/fuchsia/cipd/BUILD.gn b/build/fuchsia/cipd/BUILD.gn new file mode 100644 index 000000000000..0019b8645b8d --- /dev/null +++ b/build/fuchsia/cipd/BUILD.gn @@ -0,0 +1,436 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Build targets for constructing CIPD release archives. + +assert(is_fuchsia) + +import("//build/cipd/cipd.gni") +import("//build/config/chrome_build.gni") +import("//build/util/process_version.gni") +import("//third_party/fuchsia-sdk/sdk/build/build_id_dir.gni") +import("//third_party/fuchsia-sdk/sdk/build/cipd.gni") + +visibility = [ ":*" ] + +# Allows a builder to explicitly declare the CIPD path. The base path is what +# comes after `.../p/` in the CIPD URL. 
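# For illustration (arch and target name are examples): with the default
# chromium base path, the web_engine archive defined below is published at a
# CIPD path like "chromium/fuchsia/web_engine/arm64/web_engine", following
# the "${package_base_path}/${package_subdirectory}/${targetarch}/<name>"
# pattern used by the cipd_archive template.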
+declare_args() { + fuchsia_cipd_package_base_path = "" +} + +# TODO(zijiehe): Eliminate the use of 'package_base_path' during the +# refactoring. +if (fuchsia_cipd_package_base_path == "") { + if (is_chrome_branded) { + package_base_path = "chrome_internal/fuchsia" + } else { + package_base_path = "chromium/fuchsia" + } +} else { + package_base_path = fuchsia_cipd_package_base_path +} + +# Archives related specifically to `fuchsia.web` +_web_engine_directory = "web_engine" + +# Archives related specifically to Chrome browser. +_chrome_directory = "chrome" + +# Archives of tools intended to be run on a Linux/Mac host rather than the +# Fuchsia device. +_host_tools_directory = "host_tools" + +_archive_suffix = "_archive" + +# Extracts the numeric Chrome version and writes it to a file in the output +# directory. +# +# To check out the repository on the commit where the version was generated, +# simply call `git checkout `, and Git will check out the commit +# associated with the tag. +process_version("version") { + template_file = "version.template" + sources = [ "//chrome/VERSION" ] + output = "${target_gen_dir}/VERSION" + process_only = true +} + +if (target_cpu == "x64") { + targetarch = "amd64" +} else { + targetarch = "arm64" +} + +# Prepares a CIPD archive, produces a corresponding LICENSE file, +# LICENSE.spdx.json file and generates a manifest file. +# +# Parameters: +# package_subdirectory: Specify the subdirectory relative to +# |package_base_path| in which the package is put. +# description: Sets the "description" field in CIPD package definition. +# +# Optional parameters used directly by fuchsia_cipd_package template: +# "install_mode", +# "sources", +# "data", +# "data_deps" +# "deps", +# "testonly", + +template("cipd_archive") { + forward_variables_from(invoker, + [ + "package_subdirectory", + "description", + "install_mode", + "sources", + "data", + "data_deps", + "deps", + "testonly", + ]) + + _license_path = "${target_gen_dir}/${target_name}/LICENSE" + _invoker_dir = get_label_info(":${invoker.target_name}", "dir") + _license_target = "${_invoker_dir}:${invoker.target_name}${_archive_suffix}" + + # GN is used by the script and is thus an input. + if (host_os == "mac") { + _gn_path = "//buildtools/mac/gn" + } else if (host_os == "linux") { + _gn_path = "//buildtools/linux64/gn" + } + + # Produces a consolidated license file. + action("${target_name}_license") { + script = "//tools/licenses/licenses.py" + inputs = [ "$_gn_path" ] + outputs = [ _license_path ] + args = [ + "license_file", + rebase_path(_license_path, root_build_dir), + "--gn-target", + _license_target, + "--gn-out-dir", + ".", + ] + } + + # Produces a consolidated license file in spdx format. 
+ action("${target_name}_license_spdx") { + _license_path_spdx = "${_license_path}.spdx.json" + + script = "//tools/licenses/licenses.py" + inputs = [ "$_gn_path" ] + outputs = [ _license_path_spdx ] + args = [ + "license_file", + rebase_path(_license_path_spdx, root_build_dir), + "--gn-target", + _license_target, + "--gn-out-dir", + ".", + "--format", + "spdx", + "--spdx-doc-name", + "${invoker.target_name}", + ] + } + + if (!defined(deps)) { + deps = [] + } + deps += [ + ":${target_name}_license", + ":${target_name}_license_spdx", + ":version", + ] + + if (!defined(sources)) { + sources = [] + } + sources += get_target_outputs(":${target_name}_license") + + get_target_outputs(":${target_name}_license_spdx") + + [ "${target_gen_dir}/VERSION" ] + + fuchsia_cipd_package("${target_name}${_archive_suffix}") { + package = "${package_base_path}/${package_subdirectory}/${targetarch}/${invoker.target_name}" + package_root = "${target_gen_dir}/${invoker.target_name}" + package_definition_name = "${invoker.target_name}.yaml" + + # Always use absolute path. + use_absolute_root_path = true + } +} + +# Prepares a CIPD test archive, which is a regular CIPD archive that generates +# test manifests for a given list of test_sets. +# +# Parameters: +# test_sets: A list of scopes for which test manifests will be created. Each +# set contains: +# manifest_path: The path to the generated manifest JSON file. +# far_sources: An optional list of CFv2 test component .far files. +# +# Required parameters used by the cipd_archive template: +# "package_subdirectory", +# +# Optional parameters used by the cipd_archive template: +# "description" +# "install_mode", +# "data", +# "data_deps" +# "deps", +# "testonly", + +template("cipd_test_archive") { + forward_variables_from(invoker, + [ + "package_subdirectory", + "description", + "install_mode", + "data", + "data_deps", + "deps", + "testonly", + "test_sets", + ]) + + assert(defined(test_sets) && defined(testonly) && testonly == true) + + cipd_archive(target_name) { + # Build JSON manifests for each suite of tests and include them in the + # archive. + sources = [] + foreach(test_set, test_sets) { + assert(defined(test_set.far_sources)) + sources += [ test_set.manifest_path ] + _manifest_contents = [] + if (defined(test_set.far_sources)) { + foreach(source, test_set.far_sources) { + package_name = get_path_info(source, "name") + + _manifest_contents += [ + { + package = package_name + component_name = package_name + ".cm" + }, + ] + } + sources += test_set.far_sources + } + write_file(test_set.manifest_path, _manifest_contents, "json") + } + } +} + +cipd_archive("web_engine") { + package_subdirectory = _web_engine_directory + description = "Prebuilt WebEngine binaries for Fuchsia." + + deps = [ "//fuchsia_web/webengine:web_engine" ] + sources = + [ "${root_gen_dir}/fuchsia_web/webengine/web_engine/web_engine.far" ] +} + +cipd_archive("cast_runner") { + package_subdirectory = _web_engine_directory + description = "Prebuilt Cast application Runner binaries for Fuchsia." + + deps = [ "//fuchsia_web/runners:cast_runner_pkg" ] + sources = + [ "${root_gen_dir}/fuchsia_web/runners/cast_runner/cast_runner.far" ] +} + +cipd_archive("web_engine_shell") { + package_subdirectory = _web_engine_directory + description = "Simple command-line embedder for WebEngine." 
+ testonly = true + + deps = [ "//fuchsia_web/shell:web_engine_shell_pkg" ] + sources = [ + "${root_gen_dir}/fuchsia_web/shell/web_engine_shell/web_engine_shell.far", + ] +} + +_stripped_chromedriver_file = "${root_out_dir}/clang_x64/stripped/chromedriver" + +action("strip_chromedriver_binary") { + testonly = true + + prog_name = "${root_out_dir}/clang_x64/chromedriver" + + deps = [ "//chrome/test/chromedriver:chromedriver_server($host_toolchain)" ] + script = "//build/gn_run_binary.py" + sources = [ + "//buildtools/third_party/eu-strip/bin/eu-strip", + prog_name, + ] + outputs = [ _stripped_chromedriver_file ] + args = [ + rebase_path("//buildtools/third_party/eu-strip/bin/eu-strip", + root_build_dir), + "-o", + rebase_path(_stripped_chromedriver_file, root_build_dir), + rebase_path(prog_name, root_build_dir), + ] +} + +cipd_archive("chromedriver") { + package_subdirectory = "${_host_tools_directory}/\${os}" + description = "Prebuilt Chromedriver binary for Fuchsia host." + install_mode = "copy" + testonly = true + + deps = [ ":strip_chromedriver_binary" ] + sources = [ _stripped_chromedriver_file ] +} + +cipd_test_archive("tests") { + package_subdirectory = _web_engine_directory + description = "Prebuilt Chromium tests for Fuchsia." + testonly = true + + deps = [ + "//base:base_unittests_pkg", + "//fuchsia_web/runners:cast_runner_integration_tests_pkg", + "//fuchsia_web/webengine:web_engine_integration_tests_pkg", + "//ipc:ipc_tests_pkg", + "//media:media_unittests_pkg", + "//mojo:mojo_unittests_pkg", + "//skia:skia_unittests_pkg", + "//third_party/blink/common:blink_common_unittests_pkg", + ] + + test_sets = [ + { + manifest_path = "${target_gen_dir}/test_manifest.json" + far_sources = [ + "${root_gen_dir}/base/base_unittests/base_unittests.far", + "${root_gen_dir}/fuchsia_web/runners/cast_runner_integration_tests/cast_runner_integration_tests.far", + "${root_gen_dir}/fuchsia_web/webengine/web_engine_integration_tests/web_engine_integration_tests.far", + "${root_gen_dir}/ipc/ipc_tests/ipc_tests.far", + "${root_gen_dir}/media/media_unittests/media_unittests.far", + "${root_gen_dir}/mojo/mojo_unittests/mojo_unittests.far", + "${root_gen_dir}/skia/skia_unittests/skia_unittests.far", + "${root_gen_dir}/third_party/blink/common/blink_common_unittests/blink_common_unittests.far", + ] + }, + { + manifest_path = "${target_gen_dir}/common_tests_manifest.json" + far_sources = [ + "${root_gen_dir}/base/base_unittests/base_unittests.far", + "${root_gen_dir}/ipc/ipc_tests/ipc_tests.far", + "${root_gen_dir}/media/media_unittests/media_unittests.far", + "${root_gen_dir}/mojo/mojo_unittests/mojo_unittests.far", + "${root_gen_dir}/skia/skia_unittests/skia_unittests.far", + "${root_gen_dir}/third_party/blink/common/blink_common_unittests/blink_common_unittests.far", + ] + }, + { + manifest_path = "${target_gen_dir}/web_engine_tests_manifest.json" + far_sources = [ "${root_gen_dir}/fuchsia_web/webengine/web_engine_integration_tests/web_engine_integration_tests.far" ] + }, + { + manifest_path = "${target_gen_dir}/cast_runner_tests_manifest.json" + far_sources = [ "${root_gen_dir}/fuchsia_web/runners/cast_runner_integration_tests/cast_runner_integration_tests.far" ] + }, + ] +} + +# Construct a consolidated directory of web_engine debugging symbols using the +# GNU .build_id structure for CIPD archival. 
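# (The .build_id layout splits each build ID into a two-character directory
# and a file named after the remaining characters, e.g. a hypothetical build
# ID "abcdef..." is stored as .build_id/ab/cdef....debug; the InstallSymbols
# helper in the deleted deploy_to_amber_repo.py later in this patch uses the
# same scheme.)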
+_web_engine_build_ids_target = "web_engine_debug_symbol_directory" +_web_engine_debug_symbols_archive_name = "web_engine_debug_symbols" +_web_engine_debug_symbols_outdir = "${target_gen_dir}/${_web_engine_debug_symbols_archive_name}/${_web_engine_build_ids_target}" + +build_id_dir(_web_engine_build_ids_target) { + testonly = true # Some of the archives contain test packages. + output_path = _web_engine_debug_symbols_outdir + deps = [ ":web_engine_archives_with_tests" ] +} + +fuchsia_cipd_package(_web_engine_debug_symbols_archive_name) { + testonly = true + package = "${package_base_path}/${_web_engine_directory}/${targetarch}/debug-symbols" + package_root = _web_engine_debug_symbols_outdir + package_definition_name = "${target_name}.yaml" + package_definition_dir = "${target_gen_dir}/${target_name}" + description = "Debugging symbols for prebuilt binaries from Chromium." + use_absolute_root_path = true + + directories = [ "." ] + deps = [ ":${_web_engine_build_ids_target}" ] +} + +cipd_archive("chrome") { + package_subdirectory = _chrome_directory + description = "Prebuilt Chrome browser package." + + deps = [ "//chrome/app:chrome_pkg" ] + sources = [ "${root_gen_dir}/chrome/app/chrome/chrome.far" ] +} + +_chrome_build_ids_target = "chrome_debug_symbol_directory" +_chrome_debug_symbols_archive_name = "chrome_debug_symbols" +_chrome_debug_symbols_outdir = "${target_gen_dir}/${_chrome_debug_symbols_archive_name}/${_chrome_build_ids_target}" + +build_id_dir(_chrome_build_ids_target) { + testonly = true # Some of the archives contain test packages. + output_path = _chrome_debug_symbols_outdir + deps = [ ":chrome${_archive_suffix}" ] +} + +fuchsia_cipd_package(_chrome_debug_symbols_archive_name) { + testonly = true + package = + "${package_base_path}/${_chrome_directory}/${targetarch}/debug-symbols" + package_root = _chrome_debug_symbols_outdir + package_definition_name = "${target_name}.yaml" + package_definition_dir = "${target_gen_dir}/${target_name}" + description = "Debugging symbols for prebuilt binaries from Chromium." + use_absolute_root_path = true + + directories = [ "." ] + deps = [ ":${_chrome_build_ids_target}" ] +} + +# A group for production archives to ensure nothing is testonly. +group("web_engine_production_archives") { + deps = [ + ":cast_runner${_archive_suffix}", + ":web_engine${_archive_suffix}", + ] +} + +# Used by both the main group as well as :debug_symbols. +group("web_engine_archives_with_tests") { + testonly = true # tests and web_engine_shell are testonly. + deps = [ + ":tests${_archive_suffix}", + ":web_engine_production_archives", + ":web_engine_shell${_archive_suffix}", + ] +} + +# TODO(zijiehe): Rename to "cipd_yaml" when possible. +# This target only creates yaml files and related archives for cipd rather +# than executing the cipd instance to upload them. +# Currently it's named as "cipd" to match the folder name which introduces +# confusions. +group("cipd") { + testonly = true # Some archives are testonly. + deps = [ + ":chrome${_archive_suffix}", + ":chrome_debug_symbols", + ":web_engine_archives_with_tests", + + # Symbols are not uploaded for the following. + ":chromedriver${_archive_suffix}", + ":web_engine_debug_symbols", + ] + visibility = [] # Required to replace the file default. 
+ visibility = [ "//:gn_all" ] +} diff --git a/build/fuchsia/cipd/DIR_METADATA b/build/fuchsia/cipd/DIR_METADATA new file mode 100644 index 000000000000..210aa6a954b8 --- /dev/null +++ b/build/fuchsia/cipd/DIR_METADATA @@ -0,0 +1 @@ +mixins: "//build/fuchsia/COMMON_METADATA" diff --git a/build/fuchsia/cipd/README.md b/build/fuchsia/cipd/README.md new file mode 100644 index 000000000000..c0de38b3c015 --- /dev/null +++ b/build/fuchsia/cipd/README.md @@ -0,0 +1,11 @@ +# CIPD recipes + +The `//build/fuchsia/cipd` target generates a number of YAML files that are used to +produce archives that are uploaded to CIPD. The generated YAML files are stored +in the output directory under the path `gen/build/fuchsia/cipd/`. + +## Example usage + +The most recent package can be discovered by searching for the "canary" ref: + +`$ cipd describe chromium/fuchsia/$PACKAGE_NAME-$TARGET_ARCH -version canary` diff --git a/build/fuchsia/cipd/version.template b/build/fuchsia/cipd/version.template new file mode 100644 index 000000000000..32a49a4aef83 --- /dev/null +++ b/build/fuchsia/cipd/version.template @@ -0,0 +1 @@ +@MAJOR@.@MINOR@.@BUILD@.@PATCH@ \ No newline at end of file diff --git a/build/fuchsia/common.py b/build/fuchsia/common.py deleted file mode 100644 index 99ced81ee986..000000000000 --- a/build/fuchsia/common.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import logging -import os -import platform -import signal -import socket -import subprocess -import sys -import time -import threading - -DIR_SOURCE_ROOT = os.path.abspath( - os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) -IMAGES_ROOT = os.path.join( - DIR_SOURCE_ROOT, 'third_party', 'fuchsia-sdk', 'images') -SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'fuchsia-sdk', 'sdk') - -def EnsurePathExists(path): - """Checks that the file |path| exists on the filesystem and returns the path - if it does, raising an exception otherwise.""" - - if not os.path.exists(path): - raise IOError('Missing file: ' + path) - - return path - -def GetHostOsFromPlatform(): - host_platform = sys.platform - if host_platform.startswith('linux'): - return 'linux' - elif host_platform.startswith('darwin'): - return 'mac' - raise Exception('Unsupported host platform: %s' % host_platform) - -def GetHostArchFromPlatform(): - host_arch = platform.machine() - if host_arch == 'x86_64': - return 'x64' - elif host_arch == 'aarch64': - return 'arm64' - raise Exception('Unsupported host architecture: %s' % host_arch) - -def GetHostToolPathFromPlatform(tool): - host_arch = platform.machine() - return os.path.join(SDK_ROOT, 'tools', GetHostArchFromPlatform(), tool) - - -def GetEmuRootForPlatform(emulator): - return os.path.join( - DIR_SOURCE_ROOT, 'third_party', '{0}-{1}-{2}'.format( - emulator, GetHostOsFromPlatform(), GetHostArchFromPlatform())) - - -def ConnectPortForwardingTask(target, local_port, remote_port = 0): - """Establishes a port forwarding SSH task to a localhost TCP endpoint hosted - at port |local_port|. Blocks until port forwarding is established. - - Returns the remote port number.""" - - forwarding_flags = ['-O', 'forward', # Send SSH mux control signal. - '-R', '%d:localhost:%d' % (remote_port, local_port), - '-v', # Get forwarded port info from stderr. - '-NT'] # Don't execute command; don't allocate terminal. - - if remote_port != 0: - # Forward to a known remote port. 
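# (Editorial note at this step: with a fixed |remote_port|, ssh's "-O
# forward" mux request prints nothing on success, so there is no output to
# parse and the function simply returns after checking the exit code; the
# dynamic-port branch below instead parses the allocated port number back
# out of ssh's verbose output.)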
- task = target.RunCommand([], ssh_args=forwarding_flags) - if task.returncode != 0: - raise Exception('Could not establish a port forwarding connection.') - return - - task = target.RunCommandPiped([], - ssh_args=forwarding_flags, - stdout=subprocess.PIPE, - stderr=open('/dev/null')) - output = task.stdout.readlines() - task.wait() - if task.returncode != 0: - raise Exception('Got an error code when requesting port forwarding: %d' % - task.returncode) - - parsed_port = int(output[0].strip()) - logging.debug('Port forwarding established (local=%d, device=%d)' % - (local_port, parsed_port)) - return parsed_port - - -def GetAvailableTcpPort(): - """Finds a (probably) open port by opening and closing a listen socket.""" - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.bind(("", 0)) - port = sock.getsockname()[1] - sock.close() - return port - - -def SubprocessCallWithTimeout(command, silent=False, timeout_secs=None): - """Helper function for running a command. - - Args: - command: The command to run. - silent: If true, stdout and stderr of the command will not be printed. - timeout_secs: Maximum amount of time allowed for the command to finish. - - Returns: - A tuple of (return code, stdout, stderr) of the command. Raises - an exception if the subprocess times out. - """ - - if silent: - devnull = open(os.devnull, 'w') - process = subprocess.Popen(command, stdout=devnull, stderr=devnull) - else: - process = subprocess.Popen(command, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - timeout_timer = None - if timeout_secs: - - def interrupt_process(): - process.send_signal(signal.SIGKILL) - - timeout_timer = threading.Timer(timeout_secs, interrupt_process) - - # Ensure that keyboard interrupts are handled properly (crbug/1198113). - timeout_timer.daemon = True - - timeout_timer.start() - - out, err = process.communicate() - if timeout_timer: - timeout_timer.cancel() - - if process.returncode == -9: - raise Exception('Timeout when executing \"%s\".' % ' '.join(command)) - - return process.returncode, out, err diff --git a/build/fuchsia/common_args.py b/build/fuchsia/common_args.py deleted file mode 100644 index 877beaafa99e..000000000000 --- a/build/fuchsia/common_args.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import argparse -import importlib -import logging -import os -import sys - -from common import GetHostArchFromPlatform - -BUILTIN_TARGET_NAMES = ['aemu', 'qemu', 'device'] - - -def _AddTargetSpecificationArgs(arg_parser): - """Returns a parser that handles the target type used for the test run.""" - - device_args = arg_parser.add_argument_group( - 'target', - 'Arguments specifying the Fuchsia target type. To see a list of ' - 'arguments available for a specific target type, specify the desired ' - 'target to use and add the --help flag.') - device_args.add_argument('--target-cpu', - default=GetHostArchFromPlatform(), - help='GN target_cpu setting for the build. Defaults ' - 'to the same architecture as host cpu.') - device_args.add_argument('--device', - default=None, - choices=BUILTIN_TARGET_NAMES + ['custom'], - help='Choose to run on aemu|qemu|device. ' - 'By default, Fuchsia will run on AEMU on x64 ' - 'hosts and QEMU on arm64 hosts. 
Alternatively, ' - 'setting to custom will require specifying the ' - 'subclass of Target class used via the ' - '--custom-device-target flag.') - device_args.add_argument('-d', - action='store_const', - dest='device', - const='device', - help='Run on device instead of emulator.') - device_args.add_argument('--custom-device-target', - default=None, - help='Specify path to file that contains the ' - 'subclass of Target that will be used. Only ' - 'needed if device specific operations such as ' - 'paving is required.') - - -def _GetPathToBuiltinTarget(target_name): - return '%s_target' % target_name - - -def _LoadTargetClass(target_path): - try: - loaded_target = importlib.import_module(target_path) - except ImportError: - logging.error( - 'Cannot import from %s. Make sure that --custom-device-target ' - 'is pointing to a file containing a target ' - 'module.' % target_path) - raise - return loaded_target.GetTargetType() - - -def AddCommonArgs(arg_parser): - """Adds command line arguments to |arg_parser| for options which are shared - across test and executable target types. - - Args: - arg_parser: an ArgumentParser object.""" - - common_args = arg_parser.add_argument_group('common', 'Common arguments') - common_args.add_argument('--runner-logs-dir', - help='Directory to write test runner logs to.') - common_args.add_argument('--exclude-system-logs', - action='store_false', - dest='include_system_logs', - help='Do not show system log data.') - common_args.add_argument('--verbose', - '-v', - default=False, - action='store_true', - help='Enable debug-level logging.') - common_args.add_argument( - '--out-dir', - type=os.path.realpath, - help=('Path to the directory in which build files are located. ' - 'Defaults to current directory.')) - common_args.add_argument('--system-log-file', - help='File to write system logs to. Specify ' - '\'-\' to log to stdout.') - common_args.add_argument('--fuchsia-out-dir', - help='Path to a Fuchsia build output directory. ' - 'Setting the GN arg ' - '"default_fuchsia_build_dir_for_installation" ' - 'will cause it to be passed here.') - - package_args = arg_parser.add_argument_group('package', 'Fuchsia Packages') - package_args.add_argument( - '--package', - action='append', - help='Paths of the packages to install, including ' - 'all dependencies.') - package_args.add_argument( - '--package-name', - help='Name of the package to execute, defined in ' + 'package metadata.') - - emu_args = arg_parser.add_argument_group('emu', 'General emulator arguments') - emu_args.add_argument('--cpu-cores', - type=int, - default=4, - help='Sets the number of CPU cores to provide.') - emu_args.add_argument('--ram-size-mb', - type=int, - default=2048, - help='Sets the emulated RAM size (MB).'), - emu_args.add_argument('--allow-no-kvm', - action='store_false', - dest='require_kvm', - default=True, - help='Do not require KVM acceleration for ' - 'emulators.') - - -# Register the arguments for all known target types and the optional custom -# target type (specified on the commandline). -def AddTargetSpecificArgs(arg_parser): - # Parse the minimal set of arguments to determine if custom targets need to - # be loaded so that their arguments can be registered. 
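# (This is the classic two-pass argparse pattern: a throwaway parser built
# with add_help=False is run via parse_known_args() just to learn whether
# --custom-device-target was supplied, so that the custom target class can
# be imported and allowed to register its own flags on the real parser
# before the full command line is parsed.)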
- target_spec_parser = argparse.ArgumentParser(add_help=False) - _AddTargetSpecificationArgs(target_spec_parser) - target_spec_args, _ = target_spec_parser.parse_known_args() - _AddTargetSpecificationArgs(arg_parser) - - for target in BUILTIN_TARGET_NAMES: - _LoadTargetClass(_GetPathToBuiltinTarget(target)).RegisterArgs(arg_parser) - if target_spec_args.custom_device_target: - _LoadTargetClass( - target_spec_args.custom_device_target).RegisterArgs(arg_parser) - - -def ConfigureLogging(args): - """Configures the logging level based on command line |args|.""" - - logging.basicConfig(level=(logging.DEBUG if args.verbose else logging.INFO), - format='%(asctime)s:%(levelname)s:%(name)s:%(message)s') - - # The test server spawner is too noisy with INFO level logging, so tweak - # its verbosity a bit by adjusting its logging level. - logging.getLogger('chrome_test_server_spawner').setLevel( - logging.DEBUG if args.verbose else logging.WARN) - - # Verbose SCP output can be useful at times but oftentimes is just too noisy. - # Only enable it if -vv is passed. - logging.getLogger('ssh').setLevel( - logging.DEBUG if args.verbose else logging.WARN) - - -def GetDeploymentTargetForArgs(args): - """Constructs a deployment target object using command line arguments. - If needed, an additional_args dict can be used to supplement the - command line arguments.""" - - if args.device == 'custom': - return _LoadTargetClass(args.custom_device_target).CreateFromArgs(args) - - if args.device: - device = args.device - else: - device = 'aemu' if args.target_cpu == 'x64' else 'qemu' - - return _LoadTargetClass(_GetPathToBuiltinTarget(device)).CreateFromArgs(args) diff --git a/build/fuchsia/deploy_to_amber_repo.py b/build/fuchsia/deploy_to_amber_repo.py deleted file mode 100755 index 80ac2fedacf3..000000000000 --- a/build/fuchsia/deploy_to_amber_repo.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2019 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Deploys Fuchsia packages to an Amber repository in a Fuchsia -build output directory.""" - -import amber_repo -import argparse -import os -import sys - - -# Populates the GDB-standard symbol directory structure |build_ids_path| with -# the files and build IDs specified in |ids_txt_path|. -def InstallSymbols(ids_txt_path, build_ids_path): - for entry in open(ids_txt_path, 'r'): - build_id, binary_relpath = entry.strip().split(' ') - binary_abspath = os.path.abspath(os.path.join(os.path.dirname(ids_txt_path), - binary_relpath)) - symbol_dir = os.path.join(build_ids_path, build_id[:2]) - symbol_file = os.path.join(symbol_dir, build_id[2:] + '.debug') - - if not os.path.exists(symbol_dir): - os.makedirs(symbol_dir) - - if os.path.islink(symbol_file) or os.path.exists(symbol_file): - # Clobber the existing entry to ensure that the symlink's target is - # up to date. - os.unlink(symbol_file) - - os.symlink(os.path.relpath(binary_abspath, symbol_dir), symbol_file) - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--package', action='append', required=True, - help='Paths to packages to install.') - parser.add_argument('--fuchsia-out-dir', - required=True, - help='Path to a Fuchsia build output directory. 
' - 'Setting the GN arg ' - '"default_fuchsia_build_dir_for_installation" ' - 'will cause it to be passed here.') - args = parser.parse_args() - assert args.package - - fuchsia_out_dir = os.path.expanduser(args.fuchsia_out_dir) - repo = amber_repo.ExternalAmberRepo( - os.path.join(fuchsia_out_dir, 'amber-files')) - print('Installing packages and symbols in Amber repo %s...' % repo.GetPath()) - - for package in args.package: - repo.PublishPackage(package) - InstallSymbols(os.path.join(os.path.dirname(package), 'ids.txt'), - os.path.join(fuchsia_out_dir, '.build-id')) - - print('Installation success.') - - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/fuchsia/device_target.py b/build/fuchsia/device_target.py deleted file mode 100644 index cb0fe50ba25a..000000000000 --- a/build/fuchsia/device_target.py +++ /dev/null @@ -1,280 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Implements commands for running and interacting with Fuchsia on devices.""" - -from __future__ import print_function - -import amber_repo -import boot_data -import filecmp -import logging -import os -import re -import subprocess -import sys -import target -import tempfile -import time -import uuid - -from common import SDK_ROOT, EnsurePathExists, GetHostToolPathFromPlatform - -# The maximum times to attempt mDNS resolution when connecting to a freshly -# booted Fuchsia instance before aborting. -BOOT_DISCOVERY_ATTEMPTS = 30 - -# Number of failed connection attempts before redirecting system logs to stdout. -CONNECT_RETRY_COUNT_BEFORE_LOGGING = 10 - -# Number of seconds to wait for device discovery. -BOOT_DISCOVERY_TIMEOUT_SECS = 2 * 60 - -# The timeout limit for one call to the device-finder tool. -_DEVICE_FINDER_TIMEOUT_LIMIT_SECS = \ - BOOT_DISCOVERY_TIMEOUT_SECS / BOOT_DISCOVERY_ATTEMPTS - -# Time between a reboot command is issued and when connection attempts from the -# host begin. -_REBOOT_SLEEP_PERIOD = 20 - - -def GetTargetType(): - return DeviceTarget - - -class DeviceTarget(target.Target): - """Prepares a device to be used as a deployment target. Depending on the - command line parameters, it automatically handling a number of preparatory - steps relating to address resolution. - - If |_node_name| is unset: - If there is one running device, use it for deployment and execution. - - If there are more than one running devices, then abort and instruct the - user to re-run the command with |_node_name| - - If |_node_name| is set: - If there is a running device with a matching nodename, then it is used - for deployment and execution. - - If |_host| is set: - Deploy to a device at the host IP address as-is.""" - - def __init__(self, - out_dir, - target_cpu, - host=None, - node_name=None, - port=None, - ssh_config=None, - fuchsia_out_dir=None, - os_check='update', - system_log_file=None): - """out_dir: The directory which will contain the files that are - generated to support the deployment. - target_cpu: The CPU architecture of the deployment target. Can be - "x64" or "arm64". - host: The address of the deployment target device. - node_name: The node name of the deployment target device. - port: The port of the SSH service on the deployment target device. - ssh_config: The path to SSH configuration data. - fuchsia_out_dir: The path to a Fuchsia build output directory, for - deployments to devices paved with local Fuchsia builds. 
- os_check: If 'check', the target's SDK version must match. - If 'update', the target will be repaved if the SDK versions - mismatch. - If 'ignore', the target's SDK version is ignored.""" - - super(DeviceTarget, self).__init__(out_dir, target_cpu) - - self._system_log_file = system_log_file - self._host = host - self._port = port - self._fuchsia_out_dir = None - self._node_name = node_name - self._os_check = os_check - self._amber_repo = None - - if self._host and self._node_name: - raise Exception('Only one of "--host" or "--name" can be specified.') - - if fuchsia_out_dir: - if ssh_config: - raise Exception('Only one of "--fuchsia-out-dir" or "--ssh_config" can ' - 'be specified.') - - self._fuchsia_out_dir = os.path.expanduser(fuchsia_out_dir) - # Use SSH keys from the Fuchsia output directory. - self._ssh_config_path = os.path.join(self._fuchsia_out_dir, 'ssh-keys', - 'ssh_config') - self._os_check = 'ignore' - - elif ssh_config: - # Use the SSH config provided via the commandline. - self._ssh_config_path = os.path.expanduser(ssh_config) - - else: - # Default to using an automatically generated SSH config and keys. - boot_data.ProvisionSSH(out_dir) - self._ssh_config_path = boot_data.GetSSHConfigPath(out_dir) - - @staticmethod - def CreateFromArgs(args): - return DeviceTarget(args.out_dir, args.target_cpu, args.host, - args.node_name, args.port, args.ssh_config, - args.fuchsia_out_dir, args.os_check, - args.system_log_file) - - @staticmethod - def RegisterArgs(arg_parser): - device_args = arg_parser.add_argument_group( - 'device', 'External device deployment arguments') - device_args.add_argument('--host', - help='The IP of the target device. Optional.') - device_args.add_argument('--node-name', - help='The node-name of the device to boot or ' - 'deploy to. Optional, will use the first ' - 'discovered device if omitted.') - device_args.add_argument('--port', - '-p', - type=int, - default=None, - help='The port of the SSH service running on the ' - 'device. Optional.') - device_args.add_argument('--ssh-config', - '-F', - help='The path to the SSH configuration used for ' - 'connecting to the target device.') - device_args.add_argument( - '--os-check', - choices=['check', 'update', 'ignore'], - default='update', - help="Sets the OS version enforcement policy. If 'check', then the " - "deployment process will halt if the target\'s version doesn\'t " - "match. If 'update', then the target device will automatically " - "be repaved. If 'ignore', then the OS version won\'t be checked.") - - def _ProvisionDeviceIfNecessary(self): - if self._Discover(): - self._WaitUntilReady() - else: - raise Exception('Could not find device. If the device is connected ' - 'to the host remotely, make sure that --host flag is ' - 'set and that remote serving is set up.') - - def _Discover(self): - """Queries mDNS for the IP address of a booted Fuchsia instance whose name - matches |_node_name| on the local area network. If |_node_name| isn't - specified, and there is only one device on the network, then returns the - IP address of that advice. - - Sets |_host_name| and returns True if the device was found, - or waits up to |timeout| seconds and returns False if the device couldn't - be found.""" - - dev_finder_path = GetHostToolPathFromPlatform('device-finder') - - if self._node_name: - command = [ - dev_finder_path, - 'resolve', - '-timeout', - "%ds" % _DEVICE_FINDER_TIMEOUT_LIMIT_SECS, - '-device-limit', - '1', # Exit early as soon as a host is found. 
-          self._node_name
-      ]
-    else:
-      command = [
-          dev_finder_path, 'list', '-full', '-timeout',
-          "%ds" % _DEVICE_FINDER_TIMEOUT_LIMIT_SECS
-      ]
-
-    proc = subprocess.Popen(command,
-                            stdout=subprocess.PIPE,
-                            stderr=open(os.devnull, 'w'))
-
-    output = set(proc.communicate()[0].strip().split('\n'))
-    if proc.returncode != 0:
-      return False
-
-    if self._node_name:
-      # Handle the result of "device-finder resolve".
-      self._host = output.pop().strip()
-
-    else:
-      name_host_pairs = [x.strip().split(' ') for x in output]
-
-      # Handle the output of "device-finder list".
-      if len(name_host_pairs) > 1:
-        print('More than one device was discovered on the network.')
-        print('Use --node-name to specify the device to use.')
-        print('\nList of devices:')
-        for pair in name_host_pairs:
-          print('  ' + pair[1])
-        print()
-        raise Exception('Ambiguous target device specification.')
-
-      assert len(name_host_pairs) == 1
-      self._host, self._node_name = name_host_pairs[0]
-
-    logging.info('Found device "%s" at address %s.' % (self._node_name,
-                                                       self._host))
-
-    return True
-
-  def Start(self):
-    if self._host:
-      self._WaitUntilReady()
-    else:
-      self._ProvisionDeviceIfNecessary()
-
-  def GetAmberRepo(self):
-    if not self._amber_repo:
-      if self._fuchsia_out_dir:
-        # Deploy to an already-booted device running a local Fuchsia build.
-        self._amber_repo = amber_repo.ExternalAmberRepo(
-            os.path.join(self._fuchsia_out_dir, 'amber-files'))
-      else:
-        # Create an ephemeral Amber repo, then start both "pm serve" and the
-        # bootserver.
-        self._amber_repo = amber_repo.ManagedAmberRepo(self)
-
-    return self._amber_repo
-
-  def _ParseNodename(self, output):
-    # Parse the nodename from bootserver stdout.
-    m = re.search(r'.*Proceeding with nodename (?P<nodename>.*)$', output,
-                  re.MULTILINE)
-    if not m:
-      raise Exception('Couldn\'t parse nodename from bootserver output.')
-    self._node_name = m.groupdict()['nodename']
-    logging.info('Booted device "%s".' % self._node_name)
-
-    # Repeatedly query mDNS until we find the device, or we hit the timeout
-    # of BOOT_DISCOVERY_TIMEOUT_SECS.
-    logging.info('Waiting for device to join network.')
-    for _ in range(BOOT_DISCOVERY_ATTEMPTS):
-      if self._Discover():
-        break
-
-    if not self._host:
-      raise Exception('Device %s couldn\'t be discovered via mDNS.' %
-                      self._node_name)
-
-    self._WaitUntilReady()
-
-  def _GetEndpoint(self):
-    return (self._host, self._port)
-
-  def _GetSshConfigPath(self):
-    return self._ssh_config_path
-
-  def Restart(self):
-    """Restart the device."""
-
-    self.RunCommand(['dm', 'reboot'])
-    time.sleep(_REBOOT_SLEEP_PERIOD)
-    self.Start()
diff --git a/build/fuchsia/emu_target.py b/build/fuchsia/emu_target.py
deleted file mode 100644
index 4f8ddbb94d48..000000000000
--- a/build/fuchsia/emu_target.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Implements commands for running/interacting with Fuchsia on an emulator."""
-
-import amber_repo
-import boot_data
-import logging
-import os
-import runner_logs
-import subprocess
-import sys
-import target
-import tempfile
-
-
-class EmuTarget(target.Target):
-  def __init__(self, out_dir, target_cpu, system_log_file, fuchsia_out_dir):
-    """out_dir: The directory which will contain the files that are
-                generated to support the emulator deployment.
-    target_cpu: The emulated target CPU architecture.
-                Can be 'x64' or 'arm64'."""
-
-    # fuchsia_out_dir is unused by emulator targets.
-    del fuchsia_out_dir
-
-    super(EmuTarget, self).__init__(out_dir, target_cpu)
-    self._emu_process = None
-    self._system_log_file = system_log_file
-    self._amber_repo = None
-
-  def __enter__(self):
-    return self
-
-  def _BuildCommand(self):
-    """Build the command that will be run to start Fuchsia in the emulator."""
-    pass
-
-  def _SetEnv(self):
-    return os.environ.copy()
-
-  # Used by the context manager to ensure that the emulator is killed when
-  # the Python process exits.
-  def __exit__(self, exc_type, exc_val, exc_tb):
-    self.Shutdown()
-
-  def Start(self):
-    emu_command = self._BuildCommand()
-
-    # We pass a separate stdin stream. Sharing stdin across processes
-    # leads to flakiness due to the OS prematurely killing the stream and the
-    # Python script panicking and aborting.
-    # The precise root cause is still nebulous, but this fix works.
-    # See crbug.com/741194.
-    logging.debug('Launching %s.' % (self.EMULATOR_NAME))
-    logging.debug(' '.join(emu_command))
-
-    # Zircon sends debug logs to the serial port (see the kernel.serial=legacy
-    # flag set in the emulator's kernel command line). The serial port is
-    # redirected to a file through the emulator's stdout. If runner logs are
-    # not enabled, the kernel serial log is written to a temporary file, which
-    # is printed if the emulator guest cannot be reached, to make connectivity
-    # issues easier to diagnose.
-    temporary_log_file = None
-    if runner_logs.IsEnabled():
-      stdout = runner_logs.FileStreamFor('serial_log')
-    else:
-      temporary_log_file = tempfile.NamedTemporaryFile('w')
-      stdout = temporary_log_file
-
-    # TODO(crbug.com/1100402): Delete when no longer needed for debug info.
-    # Log system statistics at the start of the emulator run.
-    _LogSystemStatistics('system_start_statistics_log')
-
-    self._emu_process = subprocess.Popen(emu_command,
-                                         stdin=open(os.devnull),
-                                         stdout=stdout,
-                                         stderr=subprocess.STDOUT,
-                                         env=self._SetEnv())
-
-    try:
-      self._WaitUntilReady()
-    except target.FuchsiaTargetException:
-      if temporary_log_file:
-        logging.info('Kernel logs:\n' +
-                     open(temporary_log_file.name, 'r').read())
-      raise
-
-  def GetAmberRepo(self):
-    if not self._amber_repo:
-      self._amber_repo = amber_repo.ManagedAmberRepo(self)
-
-    return self._amber_repo
-
-  def Shutdown(self):
-    if not self._emu_process:
-      logging.error('%s did not start' % (self.EMULATOR_NAME))
-      return
-    returncode = self._emu_process.poll()
-    if returncode is None:
-      logging.info('Shutting down %s' % (self.EMULATOR_NAME))
-      self._emu_process.kill()
-    elif returncode == 0:
-      logging.info('%s quit unexpectedly without errors' % self.EMULATOR_NAME)
-    elif returncode < 0:
-      logging.error('%s was terminated by signal %d' %
-                    (self.EMULATOR_NAME, -returncode))
-    else:
-      logging.error('%s quit unexpectedly with exit code %d' %
-                    (self.EMULATOR_NAME, returncode))
-
-    # TODO(crbug.com/1100402): Delete when no longer needed for debug info.
-    # Log system statistics at the end of the emulator run.
-    _LogSystemStatistics('system_end_statistics_log')
-
-
-  def _IsEmuStillRunning(self):
-    if not self._emu_process:
-      return False
-    return os.waitpid(self._emu_process.pid, os.WNOHANG)[0] == 0
-
-  def _GetEndpoint(self):
-    if not self._IsEmuStillRunning():
-      raise Exception('%s quit unexpectedly.' % (self.EMULATOR_NAME))
-    return ('localhost', self._host_ssh_port)
-
-  def _GetSshConfigPath(self):
-    return boot_data.GetSSHConfigPath(self._out_dir)
-
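Start() above illustrates a pattern worth noting: the emulator's serial output is captured either into the managed runner logs or into a temporary file that is replayed only if the guest never becomes reachable. A minimal, standalone sketch of that capture-and-replay shape, assuming a generic command and a caller-supplied readiness check (both are placeholders, not part of the Fuchsia tooling):

import logging
import subprocess
import tempfile


def start_with_serial_capture(command, wait_until_ready):
  # Keep the child's serial/stdout stream in a temporary file so it can be
  # replayed if the guest never becomes reachable.
  serial_log = tempfile.NamedTemporaryFile('w')
  process = subprocess.Popen(command, stdout=serial_log,
                             stderr=subprocess.STDOUT)
  try:
    wait_until_ready()
  except Exception:
    # Surface the captured output to make connectivity failures diagnosable.
    with open(serial_log.name, 'r') as log:
      logging.info('Serial log:\n%s', log.read())
    raise
  return process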
-
-# TODO(crbug.com/1100402): Delete when no longer needed for debug info.
-def _LogSystemStatistics(log_file_name):
-  statistics_log = runner_logs.FileStreamFor(log_file_name)
-  # Log the cpu load and process information.
-  subprocess.call(['top', '-b', '-n', '1'],
-                  stdin=open(os.devnull),
-                  stdout=statistics_log,
-                  stderr=subprocess.STDOUT)
-  subprocess.call(['ps', '-ax'],
-                  stdin=open(os.devnull),
-                  stdout=statistics_log,
-                  stderr=subprocess.STDOUT)
diff --git a/build/fuchsia/gcs_download.py b/build/fuchsia/gcs_download.py
new file mode 100644
index 000000000000..534091bce9f6
--- /dev/null
+++ b/build/fuchsia/gcs_download.py
@@ -0,0 +1,51 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                             'test')))
+
+from common import DIR_SRC_ROOT
+
+sys.path.append(os.path.join(DIR_SRC_ROOT, 'build'))
+import find_depot_tools
+
+
+def DownloadAndUnpackFromCloudStorage(url, output_dir):
+  """Fetches a tarball from GCS and uncompresses it to |output_dir|."""
+
+  # Download the tarball into a temporary directory first, then extract it to
+  # |output_dir|, so a failed or partial download is never extracted in place.
+  tmp_file = 'image.tgz'
+  with tempfile.TemporaryDirectory() as tmp_d:
+    tmp_file_location = os.path.join(tmp_d, tmp_file)
+    cmd = [
+        sys.executable,
+        os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'), 'cp',
+        url, tmp_file_location
+    ]
+
+    logging.debug('Running "%s"', ' '.join(cmd))
+    task = subprocess.run(cmd,
+                          stderr=subprocess.PIPE,
+                          stdout=subprocess.PIPE,
+                          check=True,
+                          encoding='utf-8')
+
+    try:
+      tarfile.open(name=tmp_file_location,
+                   mode='r|gz').extractall(path=output_dir)
+    except tarfile.ReadError as exc:
+      # |task| is a CompletedProcess whose output was captured as text, so
+      # its stderr can be surfaced directly to diagnose gsutil failures.
+      stderr_data = task.stderr
+      raise subprocess.CalledProcessError(
+          task.returncode, cmd,
+          'Failed to read a tarfile from gsutil.py.\n{}'.format(
+              stderr_data if stderr_data else '')) from exc
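For orientation, a typical call site for DownloadAndUnpackFromCloudStorage passes a GCS tarball URL and a destination directory; failures in gsutil.py surface as subprocess.CalledProcessError. A minimal usage sketch (the bucket path below is hypothetical; real URLs come from the SDK manifests):

from gcs_download import DownloadAndUnpackFromCloudStorage

# Hypothetical image tarball; authentication is handled by depot_tools'
# gsutil.py.
IMAGE_URL = 'gs://fuchsia-example/images/qemu-x64.tgz'

DownloadAndUnpackFromCloudStorage(IMAGE_URL, '/tmp/fuchsia-images')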
diff --git a/build/fuchsia/gcs_download_test.py b/build/fuchsia/gcs_download_test.py
new file mode 100755
index 000000000000..50b2bf1a0a5d
--- /dev/null
+++ b/build/fuchsia/gcs_download_test.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env vpython3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import subprocess
+import tarfile
+import unittest
+from unittest import mock
+
+from gcs_download import DownloadAndUnpackFromCloudStorage
+
+
+def _mock_task(status_code: int = 0, stderr: str = '') -> mock.Mock:
+  # Models the CompletedProcess returned by subprocess.run() with
+  # encoding='utf-8': a returncode and text stderr.
+  task_mock = mock.Mock()
+  attrs = {
+      'returncode': status_code,
+      'stderr': stderr,
+  }
+  task_mock.configure_mock(**attrs)
+
+  return task_mock
+
+
+@mock.patch('tempfile.TemporaryDirectory')
+@mock.patch('subprocess.run')
+@mock.patch('tarfile.open')
+@unittest.skipIf(os.name == 'nt', 'Fuchsia tests not supported on Windows')
+class TestDownloadAndUnpackFromCloudStorage(unittest.TestCase):
+  def testHappyPath(self, mock_tarfile, mock_run, mock_tmp_dir):
+    mock_run.return_value = _mock_task()
+
+    tmp_dir = os.path.join('some', 'tmp', 'dir')
+    mock_tmp_dir.return_value.__enter__.return_value = tmp_dir
+
+    mock_seq = mock.Mock()
+    mock_seq.attach_mock(mock_run, 'Run')
+    mock_seq.attach_mock(mock_tarfile, 'Untar')
+    mock_seq.attach_mock(mock_tmp_dir, 'MkTmpD')
+
+    output_dir = os.path.join('output', 'dir')
+    DownloadAndUnpackFromCloudStorage('gs://some/url', output_dir)
+
+    image_tgz_path = os.path.join(tmp_dir, 'image.tgz')
+    mock_seq.assert_has_calls([
+        mock.call.MkTmpD(),
+        mock.call.MkTmpD().__enter__(),
+        mock.call.Run(mock.ANY,
+                      stderr=subprocess.PIPE,
+                      stdout=subprocess.PIPE,
+                      check=True,
+                      encoding='utf-8'),
+        mock.call.Untar(name=image_tgz_path, mode='r|gz'),
+        mock.call.Untar().extractall(path=output_dir),
+        mock.call.MkTmpD().__exit__(None, None, None)
+    ],
+                              any_order=False)
+
+    # Verify cmd.
+    cmd = ' '.join(mock_run.call_args[0][0])
+    self.assertRegex(
+        cmd, r'.*python3?\s.*gsutil.py\s+cp\s+gs://some/url\s+' +
+        image_tgz_path)
+
+  def testFailedTarOpen(self, mock_tarfile, mock_run, mock_tmp_dir):
+    mock_run.return_value = _mock_task(stderr='some error')
+    mock_tarfile.side_effect = tarfile.ReadError()
+
+    with self.assertRaises(subprocess.CalledProcessError):
+      DownloadAndUnpackFromCloudStorage('', '')
+    mock_tmp_dir.assert_called_once()
+    mock_run.assert_called_once()
+    mock_tarfile.assert_called_once()
+
+  def testBadTaskStatusCode(self, mock_tarfile, mock_run, mock_tmp_dir):
+    mock_run.side_effect = subprocess.CalledProcessError(cmd='some/command',
+                                                         returncode=1)
+
+    with self.assertRaises(subprocess.CalledProcessError):
+      DownloadAndUnpackFromCloudStorage('', '')
+    mock_run.assert_called_once()
+    mock_tarfile.assert_not_called()
+    mock_tmp_dir.assert_called_once()
+
+
+if __name__ == '__main__':
+  unittest.main()
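The attach_mock() technique in testHappyPath above generalizes: attaching several mocks to a single parent merges their call histories into one mock_calls list, so ordering across mocks can be asserted. A self-contained illustration of the same idea (all names here are arbitrary):

import unittest
from unittest import mock


class CallOrderExample(unittest.TestCase):
  def test_cross_mock_ordering(self):
    parent = mock.Mock()
    first = mock.Mock()
    second = mock.Mock()
    # Attaching both mocks to one parent records their calls in order.
    parent.attach_mock(first, 'First')
    parent.attach_mock(second, 'Second')
    first('a')
    second('b')
    parent.assert_has_calls(
        [mock.call.First('a'), mock.call.Second('b')], any_order=False)


if __name__ == '__main__':
  unittest.main()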
diff --git a/build/fuchsia/generic_x64_target.py b/build/fuchsia/generic_x64_target.py
deleted file mode 100644
index 5fece127d976..000000000000
--- a/build/fuchsia/generic_x64_target.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Implements commands for running and interacting with the generic Fuchsia
-build on devices."""
-
-import boot_data
-import device_target
-import filecmp
-import logging
-import os
-import subprocess
-import tempfile
-
-from common import SDK_ROOT, EnsurePathExists, \
-                   GetHostToolPathFromPlatform, SubprocessCallWithTimeout
-
-
-def GetTargetType():
-  return GenericX64PavedDeviceTarget
-
-
-class GenericX64PavedDeviceTarget(device_target.DeviceTarget):
-  """In addition to the functionality provided by DeviceTarget, this class
-  automatically handles paving of x64 devices that use the generic Fuchsia
-  build.
-
-  - If there are no running devices, then search for a device running Zedboot
-    and pave it.
-
-  - If there's only one running device, or |_node_name| is set, then the
-    device's SDK version is checked unless --os-check=ignore is set.
-    If --os-check=update is set, then the target device is repaved if the SDK
-    version doesn't match."""
-
-  TARGET_HASH_FILE_PATH = '/data/.hash'
-
-  def _SDKHashMatches(self):
-    """Checks if /data/.hash on the device matches SDK_ROOT/.hash.
-
-    Returns True if the files are identical, or False otherwise.
-    """
-
-    with tempfile.NamedTemporaryFile() as tmp:
-      # TODO: Avoid using an exception for when file is unretrievable.
-      try:
-        self.GetFile(self.TARGET_HASH_FILE_PATH, tmp.name)
-      except subprocess.CalledProcessError:
-        # If the file is unretrievable for whatever reason, assume mismatch.
-        return False
-
-      return filecmp.cmp(tmp.name, os.path.join(SDK_ROOT, '.hash'), False)
-
-  def _ProvisionDeviceIfNecessary(self):
-    should_provision = False
-
-    if self._Discover():
-      self._WaitUntilReady()
-
-      if self._os_check != 'ignore':
-        if not self._SDKHashMatches():
-          if self._os_check == 'update':
-            logging.info('SDK hash does not match; rebooting and repaving.')
-            self.RunCommand(['dm', 'reboot'])
-            should_provision = True
-          elif self._os_check == 'check':
-            raise Exception('Target device SDK version does not match.')
-    else:
-      should_provision = True
-
-    if should_provision:
-      self._ProvisionDevice()
-
-  def _ProvisionDevice(self):
-    """Pave a device with a generic image of Fuchsia."""
-
-    bootserver_path = GetHostToolPathFromPlatform('bootserver')
-    bootserver_command = [
-        bootserver_path, '-1', '--fvm',
-        EnsurePathExists(
-            boot_data.GetTargetFile('storage-sparse.blk',
-                                    self._GetTargetSdkArch(),
-                                    boot_data.TARGET_TYPE_GENERIC)),
-        EnsurePathExists(
-            boot_data.GetBootImage(self._out_dir, self._GetTargetSdkArch(),
-                                   boot_data.TARGET_TYPE_GENERIC))
-    ]
-
-    if self._node_name:
-      bootserver_command += ['-n', self._node_name]
-
-    bootserver_command += ['--']
-    bootserver_command += boot_data.GetKernelArgs(self._out_dir)
-
-    logging.debug(' '.join(bootserver_command))
-    _, stdout = SubprocessCallWithTimeout(bootserver_command,
-                                          silent=False,
-                                          timeout_secs=300)
-
-    self._ParseNodename(stdout)
-
-    # Update the target's hash to match the current tree's.
-    self.PutFile(os.path.join(SDK_ROOT, '.hash'), self.TARGET_HASH_FILE_PATH)
diff --git a/build/fuchsia/linux.sdk.sha1 b/build/fuchsia/linux.sdk.sha1
deleted file mode 100644
index 6a5b8b21a39f..000000000000
--- a/build/fuchsia/linux.sdk.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4.20210519.3.1
diff --git a/build/fuchsia/linux_internal.sdk.sha1 b/build/fuchsia/linux_internal.sdk.sha1
new file mode 100644
index 000000000000..d9ca1d93be3c
--- /dev/null
+++ b/build/fuchsia/linux_internal.sdk.sha1
@@ -0,0 +1 @@
+12.20230425.1.1
diff --git a/build/fuchsia/mac.sdk.sha1 b/build/fuchsia/mac.sdk.sha1
deleted file mode 100644
index 6a5b8b21a39f..000000000000
--- a/build/fuchsia/mac.sdk.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4.20210519.3.1
diff --git a/build/fuchsia/net_test_server.py b/build/fuchsia/net_test_server.py
deleted file mode 100644
index 56005cf12cca..000000000000
--- a/build/fuchsia/net_test_server.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
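Before the net_test_server.py listing continues, one note on the paving policy above: the --os-check handling in _ProvisionDeviceIfNecessary reduces to a small decision table that is easy to lose in the nesting. A standalone restatement of the same logic (the function name is illustrative):

def should_repave(discovered, os_check, sdk_hash_matches):
  # Mirrors GenericX64PavedDeviceTarget._ProvisionDeviceIfNecessary: an
  # undiscovered device is always paved; a discovered one is repaved only
  # when the hashes differ and the policy is 'update'.
  if not discovered:
    return True
  if os_check == 'ignore' or sdk_hash_matches:
    return False
  if os_check == 'check':
    raise Exception('Target device SDK version does not match.')
  return True  # os_check == 'update'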
- -import common -import json -import logging -import os -import re -import socket -import sys -import subprocess -import tempfile - -DIR_SOURCE_ROOT = os.path.abspath( - os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) -sys.path.append(os.path.join(DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common')) -import chrome_test_server_spawner - - -# Implementation of chrome_test_server_spawner.PortForwarder that uses SSH's -# remote port forwarding feature to forward ports. -class SSHPortForwarder(chrome_test_server_spawner.PortForwarder): - def __init__(self, target): - self._target = target - - # Maps the host (server) port to the device port number. - self._port_mapping = {} - - def Map(self, port_pairs): - for p in port_pairs: - _, host_port = p - self._port_mapping[host_port] = \ - common.ConnectPortForwardingTask(self._target, host_port) - - def GetDevicePortForHostPort(self, host_port): - return self._port_mapping[host_port] - - def Unmap(self, device_port): - for host_port, entry in self._port_mapping.iteritems(): - if entry == device_port: - forwarding_args = [ - '-NT', '-O', 'cancel', '-R', '0:localhost:%d' % host_port] - task = self._target.RunCommandPiped([], - ssh_args=forwarding_args, - stdout=open(os.devnull, 'w'), - stderr=subprocess.PIPE) - task.wait() - if task.returncode != 0: - raise Exception( - 'Error %d when unmapping port %d' % (task.returncode, - device_port)) - del self._port_mapping[host_port] - return - - raise Exception('Unmap called for unknown port: %d' % device_port) - - -def SetupTestServer(target, test_concurrency, for_package, for_realms=[]): - """Provisions a forwarding test server and configures |target| to use it. - - Returns a Popen object for the test server process.""" - - logging.debug('Starting test server.') - # The TestLauncher can launch more jobs than the limit specified with - # --test-launcher-jobs so the max number of spawned test servers is set to - # twice that limit here. See https://crbug.com/913156#c19. - spawning_server = chrome_test_server_spawner.SpawningServer( - 0, SSHPortForwarder(target), test_concurrency * 2) - forwarded_port = common.ConnectPortForwardingTask( - target, spawning_server.server_port) - spawning_server.Start() - - logging.debug('Test server listening for connections (port=%d)' % - spawning_server.server_port) - logging.debug('Forwarded port is %d' % forwarded_port) - - config_file = tempfile.NamedTemporaryFile(delete=True) - - config_file.write(json.dumps({ - 'spawner_url_base': 'http://localhost:%d' % forwarded_port - })) - - config_file.flush() - target.PutFile(config_file.name, - '/tmp/net-test-server-config', - for_package=for_package, - for_realms=for_realms) - - return spawning_server diff --git a/build/fuchsia/qemu_image.py b/build/fuchsia/qemu_image.py deleted file mode 100644 index ab5e040acbd4..000000000000 --- a/build/fuchsia/qemu_image.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Workaround for qemu-img bug on arm64 platforms with multiple cores. - -Runs qemu-img command with timeout and retries the command if it hangs. - -See: -crbug.com/1046861 QEMU is out of date; current version of qemu-img -is unstable - -https://bugs.launchpad.net/qemu/+bug/1805256 qemu-img hangs on -rcu_call_ready_event logic in Aarch64 when converting images - -TODO(crbug.com/1046861): Remove this workaround when the bug is fixed. 
-""" - -import logging -import subprocess -import tempfile -import time - - -# qemu-img p99 run time on Cavium ThunderX2 servers is 26 seconds. -# Using 2x the p99 time as the timeout. -QEMU_IMG_TIMEOUT_SEC = 52 - - -def _ExecQemuImgWithTimeout(command): - """Execute qemu-img command in subprocess with timeout. - - Returns: None if command timed out or return code if command completed. - """ - - logging.info('qemu-img starting') - command_output_file = tempfile.NamedTemporaryFile('w') - p = subprocess.Popen(command, stdout=command_output_file, - stderr=subprocess.STDOUT) - start_sec = time.time() - while p.poll() is None and time.time() - start_sec < QEMU_IMG_TIMEOUT_SEC: - time.sleep(1) - stop_sec = time.time() - logging.info('qemu-img duration: %f' % float(stop_sec - start_sec)) - - if p.poll() is None: - returncode = None - p.kill() - p.wait() - else: - returncode = p.returncode - - log_level = logging.WARN if returncode else logging.DEBUG - for line in open(command_output_file.name, 'r'): - logging.log(log_level, 'qemu-img stdout: ' + line.strip()) - - return returncode - - -def ExecQemuImgWithRetry(command): - """ Execute qemu-img command in subprocess with 2 retries. - - Raises CalledProcessError if command does not complete successfully. - """ - - tries = 0 - status = None - while status is None and tries <= 2: - tries += 1 - status = _ExecQemuImgWithTimeout(command) - - if status is None: - raise subprocess.CalledProcessError(-1, command) - if status: - raise subprocess.CalledProcessError(status, command) diff --git a/build/fuchsia/qemu_target.py b/build/fuchsia/qemu_target.py deleted file mode 100644 index 236c41d28267..000000000000 --- a/build/fuchsia/qemu_target.py +++ /dev/null @@ -1,255 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Implements commands for running and interacting with Fuchsia on QEMU.""" - -import boot_data -import common -import emu_target -import hashlib -import logging -import os -import platform -import qemu_image -import shutil -import subprocess -import sys -import tempfile - -from common import GetHostArchFromPlatform, GetEmuRootForPlatform -from common import EnsurePathExists -from qemu_image import ExecQemuImgWithRetry -from target import FuchsiaTargetException - - -# Virtual networking configuration data for QEMU. -GUEST_NET = '192.168.3.0/24' -GUEST_IP_ADDRESS = '192.168.3.9' -HOST_IP_ADDRESS = '192.168.3.2' -GUEST_MAC_ADDRESS = '52:54:00:63:5e:7b' - -# Capacity of the system's blobstore volume. 
-EXTENDED_BLOBSTORE_SIZE = 1073741824 # 1GB - - -def GetTargetType(): - return QemuTarget - - -class QemuTarget(emu_target.EmuTarget): - EMULATOR_NAME = 'qemu' - - def __init__(self, - out_dir, - target_cpu, - system_log_file, - cpu_cores, - require_kvm, - ram_size_mb, - fuchsia_out_dir=None): - super(QemuTarget, self).__init__(out_dir, target_cpu, system_log_file, - fuchsia_out_dir) - self._cpu_cores=cpu_cores - self._require_kvm=require_kvm - self._ram_size_mb=ram_size_mb - - @staticmethod - def CreateFromArgs(args): - return QemuTarget(args.out_dir, args.target_cpu, args.system_log_file, - args.cpu_cores, args.require_kvm, args.ram_size_mb, - args.fuchsia_out_dir) - - def _IsKvmEnabled(self): - kvm_supported = sys.platform.startswith('linux') and \ - os.access('/dev/kvm', os.R_OK | os.W_OK) - same_arch = \ - (self._target_cpu == 'arm64' and platform.machine() == 'aarch64') or \ - (self._target_cpu == 'x64' and platform.machine() == 'x86_64') - if kvm_supported and same_arch: - return True - elif self._require_kvm: - if same_arch: - if not os.path.exists('/dev/kvm'): - kvm_error = 'File /dev/kvm does not exist. Please install KVM first.' - else: - kvm_error = 'To use KVM acceleration, add user to the kvm group '\ - 'with "sudo usermod -a -G kvm $USER". Log out and back '\ - 'in for the change to take effect.' - raise FuchsiaTargetException(kvm_error) - else: - raise FuchsiaTargetException('KVM unavailable when CPU architecture '\ - 'of host is different from that of'\ - ' target. See --allow-no-kvm.') - else: - return False - - def _BuildQemuConfig(self): - boot_data.AssertBootImagesExist(self._GetTargetSdkArch(), 'qemu') - - emu_command = [ - '-kernel', - EnsurePathExists( - boot_data.GetTargetFile('qemu-kernel.kernel', - self._GetTargetSdkArch(), - boot_data.TARGET_TYPE_QEMU)), - '-initrd', - EnsurePathExists( - boot_data.GetBootImage(self._out_dir, self._GetTargetSdkArch(), - boot_data.TARGET_TYPE_QEMU)), - '-m', - str(self._ram_size_mb), - '-smp', - str(self._cpu_cores), - - # Attach the blobstore and data volumes. Use snapshot mode to discard - # any changes. - '-snapshot', - '-drive', - 'file=%s,format=qcow2,if=none,id=blobstore,snapshot=on' % - _EnsureBlobstoreQcowAndReturnPath(self._out_dir, - self._GetTargetSdkArch()), - '-device', - 'virtio-blk-pci,drive=blobstore', - - # Use stdio for the guest OS only; don't attach the QEMU interactive - # monitor. - '-serial', - 'stdio', - '-monitor', - 'none', - ] - - # Configure the machine to emulate, based on the target architecture. - if self._target_cpu == 'arm64': - emu_command.extend([ - '-machine','virt,gic_version=3', - ]) - else: - emu_command.extend([ - '-machine', 'q35', - ]) - - # Configure virtual network. It is used in the tests to connect to - # testserver running on the host. - netdev_type = 'virtio-net-pci' - netdev_config = 'user,id=net0,net=%s,dhcpstart=%s,host=%s' % \ - (GUEST_NET, GUEST_IP_ADDRESS, HOST_IP_ADDRESS) - - self._host_ssh_port = common.GetAvailableTcpPort() - netdev_config += ",hostfwd=tcp::%s-:22" % self._host_ssh_port - emu_command.extend([ - '-netdev', netdev_config, - '-device', '%s,netdev=net0,mac=%s' % (netdev_type, GUEST_MAC_ADDRESS), - ]) - - # Configure the CPU to emulate. - # On Linux, we can enable lightweight virtualization (KVM) if the host and - # guest architectures are the same. 
- if self._IsKvmEnabled(): - kvm_command = ['-enable-kvm', '-cpu'] - if self._target_cpu == 'arm64': - kvm_command.append('host') - else: - kvm_command.append('host,migratable=no,+invtsc') - else: - logging.warning('Unable to launch %s with KVM acceleration. ' - 'The guest VM will be slow.' % (self.EMULATOR_NAME)) - if self._target_cpu == 'arm64': - kvm_command = ['-cpu', 'cortex-a53'] - else: - kvm_command = ['-cpu', 'Haswell,+smap,-check,-fsgsbase'] - - emu_command.extend(kvm_command) - - kernel_args = boot_data.GetKernelArgs(self._out_dir) - - # TERM=dumb tells the guest OS to not emit ANSI commands that trigger - # noisy ANSI spew from the user's terminal emulator. - kernel_args.append('TERM=dumb') - - # Construct kernel cmd line - kernel_args.append('kernel.serial=legacy') - - # Don't 'reboot' the emulator if the kernel crashes - kernel_args.append('kernel.halt-on-panic=true') - - emu_command.extend(['-append', ' '.join(kernel_args)]) - - return emu_command - - def _BuildCommand(self): - if self._target_cpu == 'arm64': - qemu_exec = 'qemu-system-' + 'aarch64' - elif self._target_cpu == 'x64': - qemu_exec = 'qemu-system-' + 'x86_64' - else: - raise Exception('Unknown target_cpu %s:' % self._target_cpu) - - qemu_command = [ - os.path.join(GetEmuRootForPlatform(self.EMULATOR_NAME), 'bin', - qemu_exec) - ] - qemu_command.extend(self._BuildQemuConfig()) - qemu_command.append('-nographic') - return qemu_command - -def _ComputeFileHash(filename): - hasher = hashlib.md5() - with open(filename, 'rb') as f: - buf = f.read(4096) - while buf: - hasher.update(buf) - buf = f.read(4096) - - return hasher.hexdigest() - - -def _EnsureBlobstoreQcowAndReturnPath(out_dir, target_arch): - """Returns a file containing the Fuchsia blobstore in a QCOW format, - with extra buffer space added for growth.""" - - qimg_tool = os.path.join(common.GetEmuRootForPlatform('qemu'), - 'bin', 'qemu-img') - fvm_tool = common.GetHostToolPathFromPlatform('fvm') - blobstore_path = boot_data.GetTargetFile('storage-full.blk', target_arch, - 'qemu') - qcow_path = os.path.join(out_dir, 'gen', 'blobstore.qcow') - - # Check a hash of the blobstore to determine if we can re-use an existing - # extended version of it. - blobstore_hash_path = os.path.join(out_dir, 'gen', 'blobstore.hash') - current_blobstore_hash = _ComputeFileHash(blobstore_path) - - if os.path.exists(blobstore_hash_path) and os.path.exists(qcow_path): - if current_blobstore_hash == open(blobstore_hash_path, 'r').read(): - return qcow_path - - # Add some extra room for growth to the Blobstore volume. - # Fuchsia is unable to automatically extend FVM volumes at runtime so the - # volume enlargement must be performed prior to QEMU startup. - - # The 'fvm' tool only supports extending volumes in-place, so make a - # temporary copy of 'blobstore.bin' before it's mutated. - extended_blobstore = tempfile.NamedTemporaryFile() - shutil.copyfile(blobstore_path, extended_blobstore.name) - subprocess.check_call([fvm_tool, extended_blobstore.name, 'extend', - '--length', str(EXTENDED_BLOBSTORE_SIZE), - blobstore_path]) - - # Construct a QCOW image from the extended, temporary FVM volume. - # The result will be retained in the build output directory for re-use. - qemu_img_cmd = [qimg_tool, 'convert', '-f', 'raw', '-O', 'qcow2', - '-c', extended_blobstore.name, qcow_path] - # TODO(crbug.com/1046861): Remove arm64 call with retries when bug is fixed. 
- if common.GetHostArchFromPlatform() == 'arm64': - qemu_image.ExecQemuImgWithRetry(qemu_img_cmd) - else: - subprocess.check_call(qemu_img_cmd) - - # Write out a hash of the original blobstore file, so that subsequent runs - # can trivially check if a cached extended FVM volume is available for reuse. - with open(blobstore_hash_path, 'w') as blobstore_hash_file: - blobstore_hash_file.write(current_blobstore_hash) - - return qcow_path diff --git a/build/fuchsia/qemu_target_test.py b/build/fuchsia/qemu_target_test.py deleted file mode 100755 index 44b380290926..000000000000 --- a/build/fuchsia/qemu_target_test.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/python2 -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import qemu_target -import shutil -import subprocess -import tempfile -import time -import unittest - -TEST_PAYLOAD = "Let's get this payload across the finish line!" - -tmpdir = tempfile.mkdtemp() - -# Register the target with the context manager so that it always gets -# torn down on process exit. Otherwise there might be lingering QEMU instances -# if Python crashes or is interrupted. -with qemu_target.QemuTarget(tmpdir, 'x64') as target: - class TestQemuTarget(unittest.TestCase): - @classmethod - def setUpClass(cls): - target.Start() - - @classmethod - def tearDownClass(cls): - target.Shutdown() - shutil.rmtree(tmpdir) - - def testCopyBidirectional(self): - tmp_path = tmpdir + "/payload" - with open(tmp_path, "w") as tmpfile: - tmpfile.write(TEST_PAYLOAD) - target.PutFile(tmp_path, '/tmp/payload') - - tmp_path_roundtrip = tmp_path + ".roundtrip" - target.GetFile('/tmp/payload', tmp_path_roundtrip) - with open(tmp_path_roundtrip) as roundtrip: - self.assertEqual(TEST_PAYLOAD, roundtrip.read()) - - def testRunCommand(self): - self.assertEqual(0, target.RunCommand(['true'])) - self.assertEqual(1, target.RunCommand(['false'])) - - def testRunCommandPiped(self): - proc = target.RunCommandPiped(['cat'], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE) - proc.stdin.write(TEST_PAYLOAD) - proc.stdin.flush() - proc.stdin.close() - self.assertEqual(TEST_PAYLOAD, proc.stdout.readline()) - proc.kill() - - - if __name__ == '__main__': - unittest.main() diff --git a/build/fuchsia/remote_cmd.py b/build/fuchsia/remote_cmd.py deleted file mode 100644 index 56aa8b17213d..000000000000 --- a/build/fuchsia/remote_cmd.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import logging -import os -import subprocess -import threading - -from common import SubprocessCallWithTimeout - -_SSH = ['ssh'] -_SCP = ['scp', '-C'] # Use gzip compression. -_SSH_LOGGER = logging.getLogger('ssh') - -COPY_TO_TARGET = 0 -COPY_FROM_TARGET = 1 - - -def _IsLinkLocalIPv6(hostname): - return hostname.startswith('fe80::') - -def _EscapeIfIPv6Address(address): - if ':' in address: - return '[' + address + ']' - else: - return address - -class CommandRunner(object): - """Helper class used to execute commands on a remote host over SSH.""" - - def __init__(self, config_path, host, port): - """Creates a CommandRunner that connects to the specified |host| and |port| - using the ssh config at the specified |config_path|. - - config_path: Full path to SSH configuration. - host: The hostname or IP address of the remote host. 
-    port: The port to connect to."""
-
-    self._config_path = config_path
-    self._host = host
-    self._port = port
-
-  def _GetSshCommandLinePrefix(self):
-    cmd_prefix = _SSH + ['-F', self._config_path, self._host]
-    if self._port:
-      cmd_prefix += ['-p', str(self._port)]
-    return cmd_prefix
-
-  def RunCommand(self, command, silent, timeout_secs=None):
-    """Executes an SSH command on the remote host and blocks until completion.
-
-    command: A list of strings containing the command and its arguments.
-    silent: If true, suppresses all output from 'ssh'.
-    timeout_secs: If set, limits the amount of time that |command| may run.
-                  Commands which exceed the timeout are killed.
-
-    Returns the exit code from the remote command."""
-
-    ssh_command = self._GetSshCommandLinePrefix() + command
-    _SSH_LOGGER.debug('ssh exec: ' + ' '.join(ssh_command))
-    retval, _, _ = SubprocessCallWithTimeout(ssh_command, silent, timeout_secs)
-    return retval
-
-
-  def RunCommandPiped(self, command, stdout, stderr, ssh_args=None, **kwargs):
-    """Executes an SSH command on the remote host and returns a process object
-    with access to the command's stdio streams. Does not block.
-
-    command: A list of strings containing the command and its arguments.
-    stdout: subprocess stdout. Must not be None.
-    stderr: subprocess stderr. Must not be None.
-    ssh_args: Arguments that will be passed to SSH.
-    kwargs: A dictionary of parameters to be passed to subprocess.Popen().
-            The parameters can be used to override stdin and stdout, for
-            example.
-
-    Returns a Popen object for the command."""
-
-    if not stdout or not stderr:
-      raise Exception('Stdout/stderr must be specified explicitly')
-
-    if not ssh_args:
-      ssh_args = []
-
-    ssh_command = self._GetSshCommandLinePrefix() + ssh_args + ['--'] + command
-    _SSH_LOGGER.debug(' '.join(ssh_command))
-    return subprocess.Popen(ssh_command, stdout=stdout, stderr=stderr, **kwargs)
-
-
-  def RunScp(self, sources, dest, direction, recursive=False):
-    """Copies a file to or from a remote host using SCP and blocks until
-    completion.
-
-    sources: Paths of the files to be copied.
-    dest: The path that |sources| will be copied to.
-    direction: Indicates whether the file should be copied to
-               or from the remote side.
-               Valid values are COPY_TO_TARGET or COPY_FROM_TARGET.
-    recursive: If true, performs a recursive copy.
-
-    Raises subprocess.CalledProcessError if the copy fails."""
-
-    scp_command = _SCP[:]
-    if _SSH_LOGGER.getEffectiveLevel() == logging.DEBUG:
-      scp_command.append('-v')
-    if recursive:
-      scp_command.append('-r')
-
-    host = _EscapeIfIPv6Address(self._host)
-
-    if direction == COPY_TO_TARGET:
-      dest = "%s:%s" % (host, dest)
-    else:
-      sources = ["%s:%s" % (host, source) for source in sources]
-
-    scp_command += ['-F', self._config_path]
-    if self._port:
-      scp_command += ['-P', str(self._port)]
-    scp_command += sources
-    scp_command += [dest]
-
-    _SSH_LOGGER.debug(' '.join(scp_command))
-    try:
-      subprocess.check_output(scp_command, stderr=subprocess.STDOUT)
-    except subprocess.CalledProcessError as error:
-      _SSH_LOGGER.info(error.output)
-      raise
diff --git a/build/fuchsia/run_test_package.py b/build/fuchsia/run_test_package.py
deleted file mode 100644
index e072e1f5e8be..000000000000
--- a/build/fuchsia/run_test_package.py
+++ /dev/null
@@ -1,278 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. -"""Contains a helper function for deploying and executing a packaged -executable on a Target.""" - -from __future__ import print_function - -import common -import hashlib -import logging -import multiprocessing -import os -import re -import select -import subprocess -import sys -import threading -import uuid - -from symbolizer import BuildIdsPaths, RunSymbolizer, SymbolizerFilter - -FAR = common.GetHostToolPathFromPlatform('far') - -# Amount of time to wait for the termination of the system log output thread. -_JOIN_TIMEOUT_SECS = 5 - - -def _AttachKernelLogReader(target): - """Attaches a kernel log reader as a long-running SSH task.""" - - logging.info('Attaching kernel logger.') - return target.RunCommandPiped(['dlog', '-f'], - stdin=open(os.devnull, 'r'), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - - -class SystemLogReader(object): - """Collects and symbolizes Fuchsia system log to a file.""" - - def __init__(self): - self._listener_proc = None - self._symbolizer_proc = None - self._system_log = None - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """Stops the system logging processes and closes the output file.""" - if self._symbolizer_proc: - self._symbolizer_proc.kill() - if self._listener_proc: - self._listener_proc.kill() - if self._system_log: - self._system_log.close() - - def Start(self, target, package_paths, system_log_file): - """Start a system log reader as a long-running SSH task.""" - logging.debug('Writing fuchsia system log to %s' % system_log_file) - - self._listener_proc = target.RunCommandPiped(['log_listener'], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - - self._system_log = open(system_log_file, 'w', buffering=1) - self._symbolizer_proc = RunSymbolizer(self._listener_proc.stdout, - self._system_log, - BuildIdsPaths(package_paths)) - - -class MergedInputStream(object): - """Merges a number of input streams into a UNIX pipe on a dedicated thread. - Terminates when the file descriptor of the primary stream (the first in - the sequence) is closed.""" - - def __init__(self, streams): - assert len(streams) > 0 - self._streams = streams - self._output_stream = None - self._thread = None - - def Start(self): - """Returns a pipe to the merged output stream.""" - - read_pipe, write_pipe = os.pipe() - - self._output_stream = os.fdopen(write_pipe, 'wb', 1) - self._thread = threading.Thread(target=self._Run) - self._thread.start() - - return os.fdopen(read_pipe, 'r') - - def _Run(self): - streams_by_fd = {} - primary_fd = self._streams[0].fileno() - for s in self._streams: - streams_by_fd[s.fileno()] = s - - # Set when the primary FD is closed. Input from other FDs will continue to - # be processed until select() runs dry. - flush = False - - # The lifetime of the MergedInputStream is bound to the lifetime of - # |primary_fd|. - while primary_fd: - # When not flushing: block until data is read or an exception occurs. 
- rlist, _, xlist = select.select(streams_by_fd, [], streams_by_fd) - - if len(rlist) == 0 and flush: - break - - for fileno in xlist: - del streams_by_fd[fileno] - if fileno == primary_fd: - primary_fd = None - - for fileno in rlist: - line = streams_by_fd[fileno].readline() - if line: - self._output_stream.write(line) - else: - del streams_by_fd[fileno] - if fileno == primary_fd: - primary_fd = None - - # Flush the streams by executing nonblocking reads from the input file - # descriptors until no more data is available, or all the streams are - # closed. - while streams_by_fd: - rlist, _, _ = select.select(streams_by_fd, [], [], 0) - - if not rlist: - break - - for fileno in rlist: - line = streams_by_fd[fileno].readline() - if line: - self._output_stream.write(line) - else: - del streams_by_fd[fileno] - - -def _GetComponentUri(package_name): - return 'fuchsia-pkg://fuchsia.com/%s#meta/%s.cmx' % (package_name, - package_name) - - -class RunTestPackageArgs: - """RunTestPackage() configuration arguments structure. - - code_coverage: If set, the test package will be run via 'runtests', and the - output will be saved to /tmp folder on the device. - system_logging: If set, connects a system log reader to the target. - test_realm_label: Specifies the realm name that run-test-component should use. - This must be specified if a filter file is to be set, or a results summary - file fetched after the test suite has run. - use_run_test_component: If True then the test package will be run hermetically - via 'run-test-component', rather than using 'run'. - """ - - def __init__(self): - self.code_coverage = False - self.system_logging = False - self.test_realm_label = None - self.use_run_test_component = False - - @staticmethod - def FromCommonArgs(args): - run_test_package_args = RunTestPackageArgs() - run_test_package_args.code_coverage = args.code_coverage - run_test_package_args.system_logging = args.include_system_logs - return run_test_package_args - - -def _DrainStreamToStdout(stream, quit_event): - """Outputs the contents of |stream| until |quit_event| is set.""" - - while not quit_event.is_set(): - rlist, _, _ = select.select([stream], [], [], 0.1) - if rlist: - line = rlist[0].readline() - if not line: - return - print(line.rstrip()) - - -def RunTestPackage(output_dir, target, package_paths, package_name, - package_args, args): - """Installs the Fuchsia package at |package_path| on the target, - executes it with |package_args|, and symbolizes its output. - - output_dir: The path containing the build output files. - target: The deployment Target object that will run the package. - package_paths: The paths to the .far packages to be installed. - package_name: The name of the primary package to run. - package_args: The arguments which will be passed to the Fuchsia process. - args: RunTestPackageArgs instance configuring how the package will be run. - - Returns the exit code of the remote package process.""" - - system_logger = (_AttachKernelLogReader(target) - if args.system_logging else None) - try: - if system_logger: - # Spin up a thread to asynchronously dump the system log to stdout - # for easier diagnoses of early, pre-execution failures. 
- log_output_quit_event = multiprocessing.Event() - log_output_thread = threading.Thread(target=lambda: _DrainStreamToStdout( - system_logger.stdout, log_output_quit_event)) - log_output_thread.daemon = True - log_output_thread.start() - - with target.GetAmberRepo(): - target.InstallPackage(package_paths) - - if system_logger: - log_output_quit_event.set() - log_output_thread.join(timeout=_JOIN_TIMEOUT_SECS) - - logging.info('Running application.') - - # TODO(crbug.com/1156768): Deprecate runtests. - if args.code_coverage: - # runtests requires specifying an output directory and a double dash - # before the argument list. - command = ['runtests', '-o', '/tmp', _GetComponentUri(package_name)] - if args.test_realm_label: - command += ['--realm-label', args.test_realm_label] - command += ['--'] - elif args.use_run_test_component: - command = ['run-test-component'] - if args.test_realm_label: - command += ['--realm-label=%s' % args.test_realm_label] - command.append(_GetComponentUri(package_name)) - else: - command = ['run', _GetComponentUri(package_name)] - - command.extend(package_args) - - process = target.RunCommandPiped(command, - stdin=open(os.devnull, 'r'), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - - if system_logger: - output_stream = MergedInputStream( - [process.stdout, system_logger.stdout]).Start() - else: - output_stream = process.stdout - - # Run the log data through the symbolizer process. - output_stream = SymbolizerFilter(output_stream, - BuildIdsPaths(package_paths)) - - for next_line in output_stream: - # TODO(crbug/1198733): Switch to having stream encode to utf-8 directly - # once we drop Python 2 support. - print(next_line.encode('utf-8').rstrip()) - - process.wait() - if process.returncode == 0: - logging.info('Process exited normally with status code 0.') - else: - # The test runner returns an error status code if *any* tests fail, - # so we should proceed anyway. - logging.warning('Process exited with status code %d.' % - process.returncode) - - finally: - if system_logger: - logging.info('Terminating kernel log reader.') - log_output_quit_event.set() - log_output_thread.join() - system_logger.kill() - - return process.returncode diff --git a/build/fuchsia/runner_exceptions.py b/build/fuchsia/runner_exceptions.py deleted file mode 100644 index 03f872e453fa..000000000000 --- a/build/fuchsia/runner_exceptions.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Converts exceptions to return codes and prints error messages. - -This makes it easier to query build tables for particular error types as -exit codes are visible to queries while exception stack traces are not.""" - -import errno -import fcntl -import logging -import os -import subprocess -import sys -import traceback - -from target import FuchsiaTargetException - -def _PrintException(value, trace): - """Prints stack trace and error message for the current exception.""" - - traceback.print_tb(trace) - print(str(value)) - - -def IsStdoutBlocking(): - """Returns True if sys.stdout is blocking or False if non-blocking. - - sys.stdout should always be blocking. Non-blocking is associated with - intermittent IOErrors (crbug.com/1080858). 
- """ - - nonblocking = fcntl.fcntl(sys.stdout, fcntl.F_GETFL) & os.O_NONBLOCK - return not nonblocking - - -def HandleExceptionAndReturnExitCode(): - """Maps the current exception to a return code and prints error messages. - - Mapped exception types are assigned blocks of 8 return codes starting at 64. - The choice of 64 as the starting code is based on the Advanced Bash-Scripting - Guide (http://tldp.org/LDP/abs/html/exitcodes.html). - - A generic exception is mapped to the start of the block. More specific - exceptions are mapped to numbers inside the block. For example, a - FuchsiaTargetException is mapped to return code 64, unless it involves SSH - in which case it is mapped to return code 65. - - Exceptions not specifically mapped go to return code 1. - - Returns the mapped return code.""" - - (type, value, trace) = sys.exc_info() - _PrintException(value, trace) - - if type is FuchsiaTargetException: - if 'ssh' in str(value).lower(): - print('Error: FuchsiaTargetException: SSH to Fuchsia target failed.') - return 65 - return 64 - elif type is IOError: - if value.errno == errno.EAGAIN: - logging.info('Python print to sys.stdout probably failed') - if not IsStdoutBlocking(): - logging.warn('sys.stdout is non-blocking') - return 73 - return 72 - elif type is subprocess.CalledProcessError: - if os.path.basename(value.cmd[0]) == 'scp': - print('Error: scp operation failed - %s' % str(value)) - return 81 - if os.path.basename(value.cmd[0]) == 'qemu-img': - print('Error: qemu-img fuchsia image generation failed.') - return 82 - return 80 - else: - return 1 diff --git a/build/fuchsia/runner_logs.py b/build/fuchsia/runner_logs.py deleted file mode 100644 index 20ab6b227db2..000000000000 --- a/build/fuchsia/runner_logs.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Creates and manages test runner log file objects. - -Provides a context manager object for use in a with statement -and a module level FileStreamFor function for use by clients. -""" - -import collections -import multiprocessing -import os - -from symbolizer import RunSymbolizer - -SYMBOLIZED_SUFFIX = '.symbolized' - -_RunnerLogEntry = collections.namedtuple( - '_RunnerLogEntry', ['name', 'log_file', 'path', 'symbolize']) - -# Module singleton variable. 
-_instance = None - - -class RunnerLogManager(object): - """ Runner logs object for use in a with statement.""" - - def __init__(self, log_dir, build_ids_files): - global _instance - if _instance: - raise Exception('Only one RunnerLogManager can be instantiated') - - self._log_dir = log_dir - self._build_ids_files = build_ids_files - self._runner_logs = [] - - if self._log_dir and not os.path.isdir(self._log_dir): - os.makedirs(self._log_dir) - - _instance = self - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, traceback): - pool = multiprocessing.Pool(4) - for log_entry in self._runner_logs: - pool.apply_async(_FinalizeLog, (log_entry, self._build_ids_files)) - pool.close() - pool.join() - _instance = None - - - def _FileStreamFor(self, name, symbolize): - if any(elem.name == name for elem in self._runner_logs): - raise Exception('RunnerLogManager can only open "%s" once' % name) - - path = os.path.join(self._log_dir, name) if self._log_dir else os.devnull - log_file = open(path, 'w') - - self._runner_logs.append(_RunnerLogEntry(name, log_file, path, symbolize)) - - return log_file - - -def _FinalizeLog(log_entry, build_ids_files): - log_entry.log_file.close() - - if log_entry.symbolize: - input_file = open(log_entry.path, 'r') - output_file = open(log_entry.path + SYMBOLIZED_SUFFIX, 'w') - proc = RunSymbolizer(input_file, output_file, build_ids_files) - proc.wait() - output_file.close() - input_file.close() - - -def IsEnabled(): - """Returns True if the RunnerLogManager has been created, or False if not.""" - - return _instance is not None and _instance._log_dir is not None - - -def FileStreamFor(name, symbolize=False): - """Opens a test runner file stream in the test runner log directory. - - If no test runner log directory is specified, output is discarded. - - name: log file name - symbolize: if True, make a symbolized copy of the log after closing it. - - Returns an opened log file object.""" - - return _instance._FileStreamFor(name, symbolize) if IsEnabled() else open( - os.devnull, 'w') diff --git a/build/fuchsia/symbolizer.py b/build/fuchsia/symbolizer.py deleted file mode 100644 index 8469d110463b..000000000000 --- a/build/fuchsia/symbolizer.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import logging -import os -import subprocess - -from common import SDK_ROOT -from common import GetHostArchFromPlatform -from common import GetHostToolPathFromPlatform - - -def BuildIdsPaths(package_paths): - """Generates build ids paths for symbolizer processes.""" - - build_ids_paths = map( - lambda package_path: os.path.join( - os.path.dirname(package_path), 'ids.txt'), - package_paths) - return build_ids_paths - - -def RunSymbolizer(input_file, output_file, build_ids_files): - """Starts a symbolizer process. - - input_file: Input file to be symbolized. - output_file: Output file for symbolizer stdout and stderr. - build_ids_file: Path to the ids.txt file which maps build IDs to - unstripped binaries on the filesystem. - Returns a Popen object for the started process.""" - - symbolizer = GetHostToolPathFromPlatform('symbolizer') - symbolizer_cmd = [ - symbolizer, '--build-id-dir', - os.path.join(SDK_ROOT, '.build-id') - ] - for build_ids_file in build_ids_files: - symbolizer_cmd.extend(['--ids-txt', build_ids_file]) - - logging.info('Running "%s".' 
% ' '.join(symbolizer_cmd)) - return subprocess.Popen(symbolizer_cmd, stdin=input_file, stdout=output_file, - stderr=subprocess.STDOUT, close_fds=True) - - -def SymbolizerFilter(input_file, build_ids_files): - """Symbolizes an output stream from a process. - - input_file: Input file to be symbolized. - build_ids_file: Path to the ids.txt file which maps build IDs to - unstripped binaries on the filesystem. - Returns a generator that yields symbolized process output.""" - - symbolizer_proc = RunSymbolizer(input_file, subprocess.PIPE, build_ids_files) - - while True: - # TODO(chonggu): Switch to encoding='utf-8' once we drop Python 2 - # support. - line = symbolizer_proc.stdout.readline().decode('utf-8') - if not line: - break - - # Skip spam emitted by the symbolizer that obscures the symbolized output. - # TODO(https://crbug.com/1069446): Fix the symbolizer and remove this. - if '[[[ELF ' in line: - continue - - yield line - - symbolizer_proc.wait() diff --git a/build/fuchsia/target.py b/build/fuchsia/target.py deleted file mode 100644 index 646490b92258..000000000000 --- a/build/fuchsia/target.py +++ /dev/null @@ -1,322 +0,0 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import json -import logging -import os -import subprocess -import time - -import common -import remote_cmd -import runner_logs - -_SHUTDOWN_CMD = ['dm', 'poweroff'] -_ATTACH_RETRY_INTERVAL = 1 -_ATTACH_RETRY_SECONDS = 120 - -# Amount of time to wait for Amber to complete package installation, as a -# mitigation against hangs due to amber/network-related failures. -_INSTALL_TIMEOUT_SECS = 10 * 60 - - -def _GetPackageUri(package_name): - """Returns the URI for the specified package name.""" - return 'fuchsia-pkg://fuchsia.com/%s' % (package_name) - - -def _GetPackageInfo(package_path): - """Returns a tuple with the name and version of a package.""" - - # Query the metadata file which resides next to the package file. - package_info = json.load( - open(os.path.join(os.path.dirname(package_path), 'package'))) - return package_info['name'], package_info['version'], - - -class _MapIsolatedPathsForPackage: - """Callable object which remaps /data and /tmp paths to their component- - specific locations, based on the package name and test realm path.""" - - def __init__(self, package_name, package_version, realms): - realms_path_fragment = '/r/'.join(['r/sys'] + realms) - package_sub_path = '{2}/fuchsia.com:{0}:{1}#meta:{0}.cmx/'.format( - package_name, package_version, realms_path_fragment) - self.isolated_format = '{0}' + package_sub_path + '{1}' - - def __call__(self, path): - for isolated_directory in ['/data/' , '/tmp/']: - if (path+'/').startswith(isolated_directory): - return self.isolated_format.format(isolated_directory, - path[len(isolated_directory):]) - return path - - -class FuchsiaTargetException(Exception): - def __init__(self, message): - super(FuchsiaTargetException, self).__init__(message) - - -class Target(object): - """Base class representing a Fuchsia deployment target.""" - - def __init__(self, out_dir, target_cpu): - self._out_dir = out_dir - self._started = False - self._dry_run = False - self._target_cpu = target_cpu - self._command_runner = None - - @staticmethod - def CreateFromArgs(args): - raise NotImplementedError() - - @staticmethod - def RegisterArgs(arg_parser): - pass - - # Functions used by the Python context manager for teardown. 
-  def __enter__(self):
-    return self
-
-  def __exit__(self, exc_type, exc_val, exc_tb):
-    return
-
-  def Start(self):
-    """Handles the instantiation and connection process for the Fuchsia
-    target instance."""
-
-  def IsStarted(self):
-    """Returns True if the Fuchsia target instance is ready to accept
-    commands."""
-    return self._started
-
-  def IsNewInstance(self):
-    """Returns True if the connected target instance is newly provisioned."""
-    return True
-
-  def GetCommandRunner(self):
-    """Returns a CommandRunner that can be used to execute commands on the
-    target. Most clients should prefer RunCommandPiped() and RunCommand()."""
-    self._AssertIsStarted()
-
-    if self._command_runner is None:
-      host, port = self._GetEndpoint()
-      self._command_runner = \
-          remote_cmd.CommandRunner(self._GetSshConfigPath(), host, port)
-
-    return self._command_runner
-
-  def RunCommandPiped(self, command, **kwargs):
-    """Starts a remote command and immediately returns a Popen object for the
-    command. The caller may interact with the streams, inspect the status
-    code, wait on command termination, etc.
-
-    command: A list of strings representing the command and arguments.
-    kwargs: A dictionary of parameters to be passed to subprocess.Popen().
-            The parameters can be used to override stdin and stdout, for
-            example.
-
-    Returns: a Popen object.
-
-    Note: method does not block.
-    """
-    logging.debug('running (non-blocking) \'%s\'.', ' '.join(command))
-    return self.GetCommandRunner().RunCommandPiped(command, **kwargs)
-
-  def RunCommand(self, command, silent=False, timeout_secs=None):
-    """Executes a remote command and waits for it to finish executing.
-
-    Returns the exit code of the command.
-    """
-    logging.debug('running \'%s\'.', ' '.join(command))
-    return self.GetCommandRunner().RunCommand(command, silent,
-                                              timeout_secs=timeout_secs)
-
-  def EnsureIsolatedPathsExist(self, for_package, for_realms):
-    """Ensures that the package's isolated /data and /tmp exist."""
-    for isolated_directory in ['/data', '/tmp']:
-      self.RunCommand([
-          'mkdir', '-p',
-          _MapIsolatedPathsForPackage(for_package, 0,
-                                      for_realms)(isolated_directory)
-      ])
-
-  def PutFile(self,
-              source,
-              dest,
-              recursive=False,
-              for_package=None,
-              for_realms=()):
-    """Copies a file from the local filesystem to the target filesystem.
-
-    source: The path of the file being copied.
-    dest: The path on the remote filesystem which will be copied to.
-    recursive: If true, performs a recursive copy.
-    for_package: If specified, isolated paths in |dest| are mapped to their
-                 absolute paths for the package, on the target. This
-                 currently affects the /data and /tmp directories.
-    for_realms: If specified, identifies the sub-realm of 'sys' under which
-                isolated paths (see |for_package|) are stored.
-    """
-    assert type(source) is str
-    self.PutFiles([source], dest, recursive, for_package, for_realms)
-
-  def PutFiles(self,
-               sources,
-               dest,
-               recursive=False,
-               for_package=None,
-               for_realms=()):
-    """Copies files from the local filesystem to the target filesystem.
-
-    sources: List of local file paths to copy from, or a single path.
-    dest: The path on the remote filesystem which will be copied to.
-    recursive: If true, performs a recursive copy.
-    for_package: If specified, /data in |dest| is mapped to the package's
-                 isolated /data location.
-    for_realms: If specified, identifies the sub-realm of 'sys' under which
-                isolated paths (see |for_package|) are stored.
- """ - assert type(sources) is tuple or type(sources) is list - if for_package: - self.EnsureIsolatedPathsExist(for_package, for_realms) - dest = _MapIsolatedPathsForPackage(for_package, 0, for_realms)(dest) - logging.debug('copy local:%s => remote:%s', sources, dest) - self.GetCommandRunner().RunScp(sources, dest, remote_cmd.COPY_TO_TARGET, - recursive) - - def GetFile(self, - source, - dest, - for_package=None, - for_realms=(), - recursive=False): - """Copies a file from the target filesystem to the local filesystem. - - source: The path of the file being copied. - dest: The path on the local filesystem which will be copied to. - for_package: If specified, /data in paths in |sources| is mapped to the - package's isolated /data location. - for_realms: If specified, identifies the sub-realm of 'sys' under which - isolated paths (see |for_package|) are stored. - recursive: If true, performs a recursive copy. - """ - assert type(source) is str - self.GetFiles([source], dest, for_package, for_realms, recursive) - - def GetFiles(self, - sources, - dest, - for_package=None, - for_realms=(), - recursive=False): - """Copies files from the target filesystem to the local filesystem. - - sources: List of remote file paths to copy. - dest: The path on the local filesystem which will be copied to. - for_package: If specified, /data in paths in |sources| is mapped to the - package's isolated /data location. - for_realms: If specified, identifies the sub-realm of 'sys' under which - isolated paths (see |for_package|) are stored. - recursive: If true, performs a recursive copy. - """ - assert type(sources) is tuple or type(sources) is list - self._AssertIsStarted() - if for_package: - sources = map(_MapIsolatedPathsForPackage(for_package, 0, for_realms), - sources) - logging.debug('copy remote:%s => local:%s', sources, dest) - return self.GetCommandRunner().RunScp(sources, dest, - remote_cmd.COPY_FROM_TARGET, - recursive) - - def _GetEndpoint(self): - """Returns a (host, port) tuple for the SSH connection to the target.""" - raise NotImplementedError() - - def _GetTargetSdkArch(self): - """Returns the Fuchsia SDK architecture name for the target CPU.""" - if self._target_cpu == 'arm64' or self._target_cpu == 'x64': - return self._target_cpu - raise FuchsiaTargetException('Unknown target_cpu:' + self._target_cpu) - - def _AssertIsStarted(self): - assert self.IsStarted() - - def _WaitUntilReady(self): - logging.info('Connecting to Fuchsia using SSH.') - - host, port = self._GetEndpoint() - end_time = time.time() + _ATTACH_RETRY_SECONDS - ssh_diagnostic_log = runner_logs.FileStreamFor('ssh_diagnostic_log') - while time.time() < end_time: - runner = remote_cmd.CommandRunner(self._GetSshConfigPath(), host, port) - ssh_proc = runner.RunCommandPiped(['true'], - ssh_args=['-v'], - stdout=ssh_diagnostic_log, - stderr=subprocess.STDOUT) - if ssh_proc.wait() == 0: - logging.info('Connected!') - self._started = True - return True - time.sleep(_ATTACH_RETRY_INTERVAL) - - logging.error('Timeout limit reached.') - - raise FuchsiaTargetException('Couldn\'t connect using SSH.') - - def _GetSshConfigPath(self, path): - raise NotImplementedError() - - def GetAmberRepo(self): - """Returns an AmberRepo instance which serves packages for this Target. - Callers should typically call GetAmberRepo() in a |with| statement, and - install and execute commands inside the |with| block, so that the returned - AmberRepo can teardown correctly, if necessary. 
- """ - raise NotImplementedError() - - def InstallPackage(self, package_paths): - """Installs a package and it's dependencies on the device. If the package is - already installed then it will be updated to the new version. - - package_paths: Paths to the .far files to install. - """ - with self.GetAmberRepo() as amber_repo: - # Publish all packages to the serving TUF repository under |tuf_root|. - for package_path in package_paths: - amber_repo.PublishPackage(package_path) - - # Resolve all packages, to have them pulled into the device/VM cache. - for package_path in package_paths: - package_name, package_version = _GetPackageInfo(package_path) - logging.info('Resolving %s into cache.', package_name) - return_code = self.RunCommand( - ['pkgctl', 'resolve', - _GetPackageUri(package_name), '>/dev/null'], - timeout_secs=_INSTALL_TIMEOUT_SECS) - if return_code != 0: - raise Exception( - 'Error {} while resolving {}.'.format(return_code, package_name)) - - # Verify that the newly resolved versions of packages are reported. - for package_path in package_paths: - # Use pkgctl get-hash to determine which version will be resolved. - package_name, package_version = _GetPackageInfo(package_path) - pkgctl = self.RunCommandPiped( - ['pkgctl', 'get-hash', - _GetPackageUri(package_name)], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - pkgctl_out, pkgctl_err = pkgctl.communicate() - - # Read the expected version from the meta.far Merkel hash file alongside - # the package's FAR. - meta_far_path = os.path.join(os.path.dirname(package_path), 'meta.far') - meta_far_merkel = subprocess.check_output( - [common.GetHostToolPathFromPlatform('merkleroot'), - meta_far_path]).split()[0] - if pkgctl_out != meta_far_merkel: - raise Exception('Hash mismatch for %s after resolve (%s vs %s).' % - (package_name, pkgctl_out, meta_far_merkel)) diff --git a/build/fuchsia/test/.coveragerc b/build/fuchsia/test/.coveragerc new file mode 100644 index 000000000000..815fd4b5face --- /dev/null +++ b/build/fuchsia/test/.coveragerc @@ -0,0 +1,8 @@ +# .coveragerc to control coverage.py + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Don't complain if non-runnable code isn't run: + if __name__ == .__main__.: + diff --git a/build/fuchsia/test/.style.yapf b/build/fuchsia/test/.style.yapf new file mode 100644 index 000000000000..557fa7bf84c0 --- /dev/null +++ b/build/fuchsia/test/.style.yapf @@ -0,0 +1,2 @@ +[style] +based_on_style = pep8 diff --git a/build/fuchsia/test/OWNERS b/build/fuchsia/test/OWNERS new file mode 100644 index 000000000000..90b7846a25b6 --- /dev/null +++ b/build/fuchsia/test/OWNERS @@ -0,0 +1,3 @@ +chonggu@google.com +rohpavone@chromium.org +zijiehe@google.com diff --git a/build/fuchsia/test/PRESUBMIT.py b/build/fuchsia/test/PRESUBMIT.py new file mode 100644 index 000000000000..fc5dcfe8f029 --- /dev/null +++ b/build/fuchsia/test/PRESUBMIT.py @@ -0,0 +1,51 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Top-level presubmit script for build/fuchsia/test. + +See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts +for more details about the presubmit API built into depot_tools. +""" + +USE_PYTHON3 = True + +_EXTRA_PATHS_COMPONENTS = [('testing', )] + +# pylint: disable=invalid-name,missing-function-docstring +def CommonChecks(input_api, output_api): + # Neither running nor linting Fuchsia tests is supported on Windows. 
+ if input_api.is_windows: + return [] + + tests = [] + + chromium_src_path = input_api.os_path.realpath( + input_api.os_path.join(input_api.PresubmitLocalPath(), '..', '..', + '..')) + pylint_extra_paths = [ + input_api.os_path.join(chromium_src_path, *component) + for component in _EXTRA_PATHS_COMPONENTS + ] + tests.extend( + input_api.canned_checks.GetPylint(input_api, + output_api, + extra_paths_list=pylint_extra_paths, + pylintrc='pylintrc', + version='2.7')) + + # coveragetest.py is responsible for running unit tests in this directory + tests.append( + input_api.Command( + name='coveragetest', + cmd=[input_api.python3_executable, 'coveragetest.py'], + kwargs={}, + message=output_api.PresubmitError)) + return input_api.RunTests(tests) + + +def CheckChangeOnUpload(input_api, output_api): + return CommonChecks(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return CommonChecks(input_api, output_api) diff --git a/build/fuchsia/test/base_ermine_ctl.py b/build/fuchsia/test/base_ermine_ctl.py new file mode 100644 index 000000000000..c7519867869f --- /dev/null +++ b/build/fuchsia/test/base_ermine_ctl.py @@ -0,0 +1,201 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Adds python interface to erminectl tools on workstation products.""" + +import logging +import subprocess +import time +from typing import List, Tuple + + +class BaseErmineCtl: + """Compatible class for automating control of Ermine and its OOBE. + + Must be used after checking if the tool exists. + + Usage: + ctl = base_ermine_ctl.BaseErmineCtl(some_target) + if ctl.exists: + ctl.take_to_shell() + + logging.info('In the shell') + else: + logging.info('Tool does not exist!') + + This is only necessary after a target reboot or provision (IE pave). + """ + + _OOBE_PASSWORD = 'workstation_test_password' + _TOOL = 'erminectl' + _OOBE_SUBTOOL = 'oobe' + _MAX_STATE_TRANSITIONS = 5 + + # Mapping between the current state and the next command to run + # to move it to the next state. + _STATE_TO_NEXT = { + 'SetPassword': ['set_password', _OOBE_PASSWORD], + 'Unknown': ['skip'], + 'Shell': [], + 'Login': ['login', _OOBE_PASSWORD], + } + _COMPLETE_STATE = 'Shell' + + _READY_TIMEOUT = 10 + _WAIT_ATTEMPTS = 10 + _WAIT_FOR_READY_SLEEP_SEC = 3 + + def __init__(self): + self._ermine_exists = False + self._ermine_exists_check = False + + # pylint: disable=no-self-use + # Overridable method to determine how command gets executed. + def execute_command_async(self, args: List[str]) -> subprocess.Popen: + """Executes command asynchronously, returning immediately.""" + raise NotImplementedError + + # pylint: enable=no-self-use + + @property + def exists(self) -> bool: + """Returns the existence of the tool. + + Checks whether the tool exists on and caches the result. + + Returns: + True if the tool exists, False if not. + """ + if not self._ermine_exists_check: + self._ermine_exists = self._execute_tool(['--help'], + can_fail=True) == 0 + self._ermine_exists_check = True + logging.debug('erminectl exists: %s', + ('true' if self._ermine_exists else 'false')) + return self._ermine_exists + + @property + def status(self) -> Tuple[int, str]: + """Returns the status of ermine. + + Note that if the tool times out or does not exist, a non-zero code + is returned. + + Returns: + Tuple of (return code, status as string). -1 for timeout. + Raises: + AssertionError: if the tool does not exist. 
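BaseErmineCtl deliberately leaves execute_command_async unimplemented; a subclass decides how commands reach the device. A minimal subclass sketch, assuming an SSH argv prefix of the kind get_ssh_prefix() in compatible_utils.py produces (the wiring here is illustrative, not part of the patch):

import subprocess
from typing import List

from base_ermine_ctl import BaseErmineCtl

class SshErmineCtl(BaseErmineCtl):
    # Runs erminectl over a pre-built SSH command prefix.
    def __init__(self, ssh_prefix: List[str]):
        super().__init__()
        self._ssh_prefix = ssh_prefix

    def execute_command_async(self, args: List[str]) -> subprocess.Popen:
        return subprocess.Popen(self._ssh_prefix + ['--'] + args,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                encoding='utf-8')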
+ """ + assert self.exists, (f'Tool {self._TOOL} cannot have a status if' + ' it does not exist') + # Executes base command, which returns status. + proc = self._execute_tool_async([]) + try: + proc.wait(timeout=self._READY_TIMEOUT) + except subprocess.TimeoutExpired: + logging.warning('Timed out waiting for status') + return -1, 'Timeout' + stdout, _ = proc.communicate() + return proc.returncode, stdout.strip() + + @property + def ready(self) -> bool: + """Indicates if the tool is ready for regular use. + + Returns: + False if not ready, and True if ready. + Raises: + AssertionError: if the tool does not exist. + """ + assert self.exists, (f'Tool {self._TOOL} cannot be ready if' + ' it does not exist') + return_code, _ = self.status + return return_code == 0 + + def _execute_tool_async(self, command: List[str]) -> subprocess.Popen: + """Executes a sub-command asynchronously. + + Args: + command: list of strings to compose the command. Forwards to the + command runner. + Returns: + Popen of the subprocess. + """ + full_command = [self._TOOL, self._OOBE_SUBTOOL] + full_command.extend(command) + + # Returns immediately with Popen. + return self.execute_command_async(full_command) + + def _execute_tool(self, command: List[str], can_fail: bool = False) -> int: + """Executes a sub-command of the tool synchronously. + Raises exception if non-zero returncode is given and |can_fail| = False. + + Args: + command: list of strings to compose the command. Forwards to the + command runner. + can_fail: Whether or not the command can fail. + Raises: + RuntimeError: if non-zero returncode is returned and can_fail = + False. + Returns: + Return code of command execution if |can_fail| is True. + """ + proc = self._execute_tool_async(command) + stdout, stderr = proc.communicate() + if not can_fail and proc.returncode != 0: + raise RuntimeError(f'Command {" ".join(command)} failed.' + f'\nSTDOUT: {stdout}\nSTDERR: {stderr}') + return proc.returncode + + def wait_until_ready(self) -> None: + """Waits until the tool is ready through sleep-poll. + + The tool may not be ready after a pave or restart. + This checks the status and exits after its ready or Timeout. + + Raises: + TimeoutError: if tool is not ready after certain amount of attempts. + AssertionError: if tool does not exist. + """ + assert self.exists, f'Tool {self._TOOL} must exist to use it.' + for _ in range(self._WAIT_ATTEMPTS): + if self.ready: + return + time.sleep(self._WAIT_FOR_READY_SLEEP_SEC) + raise TimeoutError('Timed out waiting for a valid status to return') + + def take_to_shell(self) -> None: + """Takes device to shell after waiting for tool to be ready. + + Examines the current state of the device after waiting for it to be + ready. Once ready, goes through the states of logging in. This is: + - CreatePassword -> Skip screen -> Shell + - Login -> Shell + - Shell + + Regardless of starting state, this will exit once the shell state is + reached. + + Raises: + NotImplementedError: if an unknown state is reached. + RuntimeError: If number of state transitions exceeds the max number + that is expected. 
+ """ + self.wait_until_ready() + _, state = self.status + max_states = self._MAX_STATE_TRANSITIONS + while state != self._COMPLETE_STATE and max_states: + max_states -= 1 + command = self._STATE_TO_NEXT.get(state) + logging.debug('Ermine state is: %s', state) + if command is None: + raise NotImplementedError('Encountered invalid state: %s' % + state) + self._execute_tool(command) + _, state = self.status + + if not max_states: + raise RuntimeError('Did not transition to shell in %d attempts.' + ' Please file a bug.' % + self._MAX_STATE_TRANSITIONS) diff --git a/build/fuchsia/test/base_ermine_ctl_unittests.py b/build/fuchsia/test/base_ermine_ctl_unittests.py new file mode 100755 index 000000000000..c0d72fe0edf5 --- /dev/null +++ b/build/fuchsia/test/base_ermine_ctl_unittests.py @@ -0,0 +1,236 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Tests scenarios for ermine_ctl""" +import logging +import subprocess +import time +import unittest +import unittest.mock as mock + +from base_ermine_ctl import BaseErmineCtl + + +class BaseBaseErmineCtlTest(unittest.TestCase): + """Unit tests for BaseBaseErmineCtl interface.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.ermine_ctl = BaseErmineCtl() + + def _set_mock_proc(self, return_value: int): + """Set |execute_command_async|'s return value to a mocked subprocess.""" + self.ermine_ctl.execute_command_async = mock.MagicMock() + mock_proc = mock.create_autospec(subprocess.Popen, instance=True) + mock_proc.communicate.return_value = 'foo', 'stderr' + mock_proc.returncode = return_value + self.ermine_ctl.execute_command_async.return_value = mock_proc + + return mock_proc + + def test_check_exists(self): + """Test |exists| returns True if tool command succeeds (returns 0).""" + self._set_mock_proc(return_value=0) + + self.assertTrue(self.ermine_ctl.exists) + + # Modifying this will not result in a change in state due to caching. 
+ self._set_mock_proc(return_value=42) + self.assertTrue(self.ermine_ctl.exists) + + def test_does_not_exist(self): + """Test |exists| returns False if tool command fails (returns != 0).""" + self._set_mock_proc(return_value=42) + + self.assertFalse(self.ermine_ctl.exists) + + def test_ready_raises_assertion_error_if_not_exist(self): + """Test |ready| raises AssertionError if tool does not exist.""" + self._set_mock_proc(return_value=42) + self.assertRaises(AssertionError, getattr, self.ermine_ctl, 'ready') + + def test_ready_returns_false_if_bad_status(self): + """Test |ready| returns False if tool has a bad status.""" + with mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status, \ + mock.patch.object(BaseErmineCtl, 'exists', + new_callable=mock.PropertyMock) as mock_exists: + mock_exists.return_value = True + mock_status.return_value = (1, 'FakeStatus') + self.assertFalse(self.ermine_ctl.ready) + + def test_ready_returns_true(self): + """Test |ready| returns True if tool returns good status (rc = 0).""" + with mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status, \ + mock.patch.object(BaseErmineCtl, 'exists', + new_callable=mock.PropertyMock) as mock_exists: + mock_exists.return_value = True + mock_status.return_value = (0, 'FakeStatus') + self.assertTrue(self.ermine_ctl.ready) + + def test_status_raises_assertion_error_if_dne(self): + """Test |status| raises AssertionError if tool does not exist.""" + with mock.patch.object(BaseErmineCtl, + 'exists', + new_callable=mock.PropertyMock) as mock_exists: + mock_exists.return_value = False + + self.assertRaises(AssertionError, getattr, self.ermine_ctl, + 'status') + + def test_status_returns_rc_and_stdout(self): + """Test |status| returns subprocess stdout and rc if tool exists.""" + with mock.patch.object(BaseErmineCtl, + 'exists', + new_callable=mock.PropertyMock) as _: + self._set_mock_proc(return_value=10) + + self.assertEqual(self.ermine_ctl.status, (10, 'foo')) + + def test_status_returns_timeout_state(self): + """Test |status| returns |Timeout| if exception is raised.""" + with mock.patch.object( + BaseErmineCtl, 'exists', new_callable=mock.PropertyMock) as _, \ + mock.patch.object(logging, 'warning') as _: + mock_proc = self._set_mock_proc(return_value=0) + mock_proc.wait.side_effect = subprocess.TimeoutExpired( + 'cmd', 'some timeout') + + self.assertEqual(self.ermine_ctl.status, (-1, 'Timeout')) + + def test_wait_until_ready_raises_assertion_error_if_tool_dne(self): + """Test |wait_until_ready| raises AssertionError if tool does not exist.""" + with mock.patch.object(BaseErmineCtl, + 'exists', + new_callable=mock.PropertyMock) as mock_exists: + mock_exists.return_value = False + + self.assertRaises(AssertionError, self.ermine_ctl.wait_until_ready) + + def test_wait_until_ready_loops_until_ready(self): + """Test |wait_until_ready| loops until |ready| returns True.""" + with mock.patch.object(BaseErmineCtl, 'exists', + new_callable=mock.PropertyMock) as mock_exists, \ + mock.patch.object(time, 'sleep') as mock_sleep, \ + mock.patch.object(BaseErmineCtl, 'ready', + new_callable=mock.PropertyMock) as mock_ready: + mock_exists.return_value = True + mock_ready.side_effect = [False, False, False, True] + + self.ermine_ctl.wait_until_ready() + + self.assertEqual(mock_ready.call_count, 4) + self.assertEqual(mock_sleep.call_count, 3) + + def test_wait_until_ready_raises_assertion_error_if_attempts_exceeded( + self): + """Test |wait_until_ready| raises TimeoutError if |ready| is
not True within n attempts.""" + with mock.patch.object(BaseErmineCtl, 'exists', + new_callable=mock.PropertyMock) as mock_exists, \ + mock.patch.object(time, 'sleep') as mock_sleep, \ + mock.patch.object(BaseErmineCtl, 'ready', + new_callable=mock.PropertyMock) as mock_ready: + mock_exists.return_value = True + mock_ready.side_effect = [False] * 15 + [True] + + self.assertRaises(TimeoutError, self.ermine_ctl.wait_until_ready) + + self.assertEqual(mock_ready.call_count, 10) + self.assertEqual(mock_sleep.call_count, 10) + + def test_take_to_shell_raises_assertion_error_if_tool_dne(self): + """Test |take_to_shell| raises AssertionError if the tool does not exist.""" + with mock.patch.object(BaseErmineCtl, + 'exists', + new_callable=mock.PropertyMock) as mock_exists: + mock_exists.return_value = False + self.assertRaises(AssertionError, self.ermine_ctl.take_to_shell) + + def test_take_to_shell_exits_on_complete_state(self): + """Test |take_to_shell| exits with no calls if in completed state.""" + with mock.patch.object(BaseErmineCtl, + 'wait_until_ready') as mock_wait_ready, \ + mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status: + mock_proc = self._set_mock_proc(return_value=52) + mock_wait_ready.return_value = True + mock_status.return_value = (0, 'Shell') + + self.ermine_ctl.take_to_shell() + + self.assertEqual(mock_proc.call_count, 0) + + def test_take_to_shell_invalid_state_raises_not_implemented_error(self): + """Test |take_to_shell| raises exception if invalid state is returned. + """ + with mock.patch.object(BaseErmineCtl, + 'wait_until_ready') as mock_wait_ready, \ + mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status: + mock_wait_ready.return_value = True + mock_status.return_value = (0, 'SomeUnknownState') + + self.assertRaises(NotImplementedError, + self.ermine_ctl.take_to_shell) + + def test_take_to_shell_with_max_transitions_raises_runtime_error(self): + """Test |take_to_shell| raises exception on too many transitions. + + |take_to_shell| attempts to transition from one state to another. + After 5 attempts, if this does not end in the completed state, a + RuntimeError is raised. + """ + with mock.patch.object(BaseErmineCtl, + 'wait_until_ready') as mock_wait_ready, \ + mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status: + mock_wait_ready.return_value = True + # Returns too many state transitions before CompleteState. + mock_status.side_effect = [(0, 'Unknown'), + (0, 'KnownWithPassword'), + (0, 'Unknown')] * 3 + [ + (0, 'CompleteState') + ] + self.assertRaises(RuntimeError, self.ermine_ctl.take_to_shell) + + def test_take_to_shell_executes_known_commands(self): + """Test |take_to_shell| executes commands if necessary. + + Some states can only be transitioned between with specific commands. + These are executed by |take_to_shell| until the final state |Shell| is + reached.
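A detail worth noting in the tests above: exists, ready, and status are properties, so they are patched with new_callable=mock.PropertyMock; patching with a plain MagicMock would merely replace the attribute with the mock object itself, and return_value/side_effect would never fire on attribute access. The mechanism in isolation (toy class, standard library only):

import unittest.mock as mock

class Probe:
    @property
    def ready(self):
        raise RuntimeError('real probe should not run in tests')

with mock.patch.object(Probe, 'ready',
                       new_callable=mock.PropertyMock) as mock_ready:
    # Each attribute access consumes one side effect, in order.
    mock_ready.side_effect = [False, True]
    p = Probe()
    assert p.ready is False
    assert p.ready is True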
+ """ + with mock.patch.object(BaseErmineCtl, + 'wait_until_ready') as mock_wait_ready, \ + mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status: + self._set_mock_proc(return_value=0) + mock_wait_ready.return_value = True + mock_status.side_effect = [(0, 'Unknown'), (0, 'SetPassword'), + (0, 'Shell')] + + self.ermine_ctl.take_to_shell() + + self.assertEqual(self.ermine_ctl.execute_command_async.call_count, + 2) + self.ermine_ctl.execute_command_async.assert_has_calls([ + mock.call(['erminectl', 'oobe', 'skip']), + mock.call().communicate(), + mock.call([ + 'erminectl', 'oobe', 'set_password', + 'workstation_test_password' + ]), + mock.call().communicate() + ]) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/fuchsia/test/common.py b/build/fuchsia/test/common.py new file mode 100644 index 000000000000..32785f15a6c4 --- /dev/null +++ b/build/fuchsia/test/common.py @@ -0,0 +1,617 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Common methods and variables used by Cr-Fuchsia testing infrastructure.""" + +import enum +import json +import logging +import os +import re +import signal +import shutil +import subprocess +import sys +import time + +from argparse import ArgumentParser +from typing import Iterable, List, Optional, Tuple + +from compatible_utils import get_ssh_prefix, get_host_arch + +DIR_SRC_ROOT = os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)) +IMAGES_ROOT = os.path.join(DIR_SRC_ROOT, 'third_party', 'fuchsia-sdk', + 'images') +REPO_ALIAS = 'fuchsia.com' +SDK_ROOT = os.path.join(DIR_SRC_ROOT, 'third_party', 'fuchsia-sdk', 'sdk') +SDK_TOOLS_DIR = os.path.join(SDK_ROOT, 'tools', get_host_arch()) +_ENABLE_ZEDBOOT = 'discovery.zedboot.enabled=true' +_FFX_TOOL = os.path.join(SDK_TOOLS_DIR, 'ffx') + +# This global variable is used to set the environment variable +# |FFX_ISOLATE_DIR| when running ffx commands in E2E testing scripts. +_FFX_ISOLATE_DIR = None + + +class TargetState(enum.Enum): + """State of a target.""" + UNKNOWN = enum.auto() + DISCONNECTED = enum.auto() + PRODUCT = enum.auto() + FASTBOOT = enum.auto() + ZEDBOOT = enum.auto() + + +class BootMode(enum.Enum): + """Specifies boot mode for device.""" + REGULAR = enum.auto() + RECOVERY = enum.auto() + BOOTLOADER = enum.auto() + + +_STATE_TO_BOOTMODE = { + TargetState.PRODUCT: BootMode.REGULAR, + TargetState.FASTBOOT: BootMode.BOOTLOADER, + TargetState.ZEDBOOT: BootMode.RECOVERY +} + +_BOOTMODE_TO_STATE = {value: key for key, value in _STATE_TO_BOOTMODE.items()} + + +class StateNotFoundError(Exception): + """Raised when target's state cannot be found.""" + + +class StateTransitionError(Exception): + """Raised when target does not transition to desired state.""" + + +def _state_string_to_state(state_str: str) -> TargetState: + state_str = state_str.strip().lower() + if state_str == 'product': + return TargetState.PRODUCT + if state_str == 'zedboot (r)': + return TargetState.ZEDBOOT + if state_str == 'fastboot': + return TargetState.FASTBOOT + if state_str == 'unknown': + return TargetState.UNKNOWN + if state_str == 'disconnected': + return TargetState.DISCONNECTED + + raise NotImplementedError(f'State {state_str} not supported') + + +def get_target_state(target_id: Optional[str], + serial_num: Optional[str], + num_attempts: int = 1) -> TargetState: + """Return state of target or the default target. 
+ + Args: + target_id: Optional nodename of the target. If not given, default target + is used. + serial_num: Optional serial number of target. Only usable if device is + in fastboot. + num_attempts: Optional number of times to attempt getting status. + + Returns: + TargetState of the given node, if found. + + Raises: + StateNotFoundError: If target cannot be found, or default target is not + defined if |target_id| is not given. + """ + for i in range(num_attempts): + targets = json.loads( + run_ffx_command(('target', 'list'), + check=True, + configs=[_ENABLE_ZEDBOOT], + capture_output=True, + json_out=True).stdout.strip()) + for target in targets: + if target_id is None and target['is_default']: + return _state_string_to_state(target['target_state']) + if target_id == target['nodename']: + return _state_string_to_state(target['target_state']) + if serial_num == target['serial']: + # Should only return Fastboot. + return _state_string_to_state(target['target_state']) + # Do not sleep for last attempt. + if i < num_attempts - 1: + time.sleep(10) + + # Could not find a state for given target. + error_target = target_id + if target_id is None: + error_target = 'default target' + + raise StateNotFoundError(f'Could not find state for {error_target}.') + + +def set_ffx_isolate_dir(isolate_dir: str) -> None: + """Overwrites |_FFX_ISOLATE_DIR|.""" + + global _FFX_ISOLATE_DIR # pylint: disable=global-statement + _FFX_ISOLATE_DIR = isolate_dir + + +def get_host_tool_path(tool): + """Get a tool from the SDK.""" + + return os.path.join(SDK_TOOLS_DIR, tool) + + +def get_host_os(): + """Get host operating system.""" + + host_platform = sys.platform + if host_platform.startswith('linux'): + return 'linux' + if host_platform.startswith('darwin'): + return 'mac' + raise Exception('Unsupported host platform: %s' % host_platform) + + +def make_clean_directory(directory_name): + """If the directory exists, delete it and remake with no contents.""" + + if os.path.exists(directory_name): + shutil.rmtree(directory_name) + os.mkdir(directory_name) + + +def _get_daemon_status(): + """Determines daemon status via `ffx daemon socket`. + + Returns: + dict of status of the socket. Status will have a key Running or + NotRunning to indicate if the daemon is running. + """ + status = json.loads( + run_ffx_command(('daemon', 'socket'), + check=True, + capture_output=True, + json_out=True, + suppress_repair=True).stdout.strip()) + return status.get('pid', {}).get('status', {'NotRunning': True}) + + +def _is_daemon_running(): + return 'Running' in _get_daemon_status() + + +def check_ssh_config_file() -> None: + """Checks for ssh keys and generates them if they are missing.""" + + script_path = os.path.join(SDK_ROOT, 'bin', 'fuchsia-common.sh') + check_cmd = ['bash', '-c', f'. {script_path}; check-fuchsia-ssh-config'] + subprocess.run(check_cmd, check=True) + + +def _wait_for_daemon(start=True, timeout_seconds=100): + """Waits for daemon to reach desired state in a polling loop. + + Sleeps for 5s between polls. + + Args: + start: bool. Indicates to wait for daemon to start up. If False, + indicates waiting for daemon to die. + timeout_seconds: int. Number of seconds to wait for the daemon to reach + the desired status. + Raises: + TimeoutError: if the daemon does not reach the desired state in time. 
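get_target_state() above delegates discovery to `ffx target list` under `--machine json` and then scans the parsed records. The selection logic on its own, assuming `targets` is the already-parsed JSON list with the field names used above:

def pick_target_state(targets, target_id=None, serial_num=None):
    # Returns the raw target_state string for the matching record, or None.
    for target in targets:
        if target_id is None and target.get('is_default'):
            return target['target_state']
        if target_id == target.get('nodename'):
            return target['target_state']
        if serial_num is not None and serial_num == target.get('serial'):
            return target['target_state']
    return None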
+ """ + wanted_status = 'start' if start else 'stop' + sleep_period_seconds = 5 + attempts = int(timeout_seconds / sleep_period_seconds) + for i in range(attempts): + if _is_daemon_running() == start: + return + if i != attempts: + logging.info('Waiting for daemon to %s...', wanted_status) + time.sleep(sleep_period_seconds) + + raise TimeoutError(f'Daemon did not {wanted_status} in time.') + + +def _run_repair_command(output): + """Scans |output| for a self-repair command to run and, if found, runs it. + + Returns: + True if a repair command was found and ran successfully. False otherwise. + """ + # Check for a string along the lines of: + # "Run `ffx doctor --restart-daemon` for further diagnostics." + match = re.search('`ffx ([^`]+)`', output) + if not match or len(match.groups()) != 1: + return False # No repair command found. + args = match.groups()[0].split() + + try: + run_ffx_command(args, suppress_repair=True) + # Need the daemon to be up at the end of this. + _wait_for_daemon(start=True) + except subprocess.CalledProcessError: + return False # Repair failed. + return True # Repair succeeded. + + +def run_ffx_command(cmd: Iterable[str], + target_id: Optional[str] = None, + check: bool = True, + suppress_repair: bool = False, + configs: Optional[List[str]] = None, + json_out: bool = False, + **kwargs) -> subprocess.CompletedProcess: + """Runs `ffx` with the given arguments, waiting for it to exit. + + If `ffx` exits with a non-zero exit code, the output is scanned for a + recommended repair command (e.g., "Run `ffx doctor --restart-daemon` for + further diagnostics."). If such a command is found, it is run and then the + original command is retried. This behavior can be suppressed via the + `suppress_repair` argument. + + Args: + cmd: A sequence of arguments to ffx. + target_id: Whether to execute the command for a specific target. The + target_id could be in the form of a nodename or an address. + check: If True, CalledProcessError is raised if ffx returns a non-zero + exit code. + suppress_repair: If True, do not attempt to find and run a repair + command. + configs: A list of configs to be applied to the current command. + json_out: Have command output returned as JSON. Must be parsed by + caller. + Returns: + A CompletedProcess instance + Raises: + CalledProcessError if |check| is true. + """ + + ffx_cmd = [_FFX_TOOL] + if json_out: + ffx_cmd.extend(('--machine', 'json')) + if target_id: + ffx_cmd.extend(('--target', target_id)) + if configs: + for config in configs: + ffx_cmd.extend(('--config', config)) + ffx_cmd.extend(cmd) + env = os.environ + if _FFX_ISOLATE_DIR: + env['FFX_ISOLATE_DIR'] = _FFX_ISOLATE_DIR + + try: + if not suppress_repair: + # If we want to repair, we need to capture output in STDOUT and + # STDERR. This could conflict with expectations of the caller. + output_captured = kwargs.get('capture_output') or ( + kwargs.get('stdout') and kwargs.get('stderr')) + if not output_captured: + # Force output to combine into STDOUT. 
+ kwargs['stdout'] = subprocess.PIPE + kwargs['stderr'] = subprocess.STDOUT + return subprocess.run(ffx_cmd, + check=check, + encoding='utf-8', + env=env, + **kwargs) + except subprocess.CalledProcessError as cpe: + logging.error('%s %s failed with returncode %s.', + os.path.relpath(_FFX_TOOL), + subprocess.list2cmdline(ffx_cmd[1:]), cpe.returncode) + if cpe.output: + logging.error('stdout of the command: %s', cpe.output) + if suppress_repair or (cpe.output + and not _run_repair_command(cpe.output)): + raise + + # If the original command failed but a repair command was found and + # succeeded, try one more time with the original command. + return run_ffx_command(cmd, target_id, check, True, configs, json_out, + **kwargs) + + +def run_continuous_ffx_command(cmd: Iterable[str], + target_id: Optional[str] = None, + encoding: Optional[str] = 'utf-8', + **kwargs) -> subprocess.Popen: + """Runs an ffx command asynchronously.""" + ffx_cmd = [_FFX_TOOL] + if target_id: + ffx_cmd.extend(('--target', target_id)) + ffx_cmd.extend(cmd) + return subprocess.Popen(ffx_cmd, encoding=encoding, **kwargs) + + +def read_package_paths(out_dir: str, pkg_name: str) -> List[str]: + """ + Returns: + A list of the absolute paths to all FAR files the package depends on. + """ + with open( + os.path.join(DIR_SRC_ROOT, out_dir, 'gen', 'package_metadata', + f'{pkg_name}.meta')) as meta_file: + data = json.load(meta_file) + packages = [] + for package in data['packages']: + packages.append(os.path.join(DIR_SRC_ROOT, out_dir, package)) + return packages + + +def register_common_args(parser: ArgumentParser) -> None: + """Register commonly used arguments.""" + common_args = parser.add_argument_group('common', 'common arguments') + common_args.add_argument( + '--out-dir', + '-C', + type=os.path.realpath, + help='Path to the directory in which build files are located. ') + + +def register_device_args(parser: ArgumentParser) -> None: + """Register device arguments.""" + device_args = parser.add_argument_group('device', 'device arguments') + device_args.add_argument('--target-id', + default=os.environ.get('FUCHSIA_NODENAME'), + help=('Specify the target device. This could be ' + 'a node-name (e.g. fuchsia-emulator) or an ' + 'IP address along with an optional port ' + '(e.g. [fe80::e1c4:fd22:5ee5:878e]:22222, ' + '1.2.3.4, 1.2.3.4:33333).
If unspecified, ' + 'the default target in ffx will be used.')) + + +def register_log_args(parser: ArgumentParser) -> None: + """Register commonly used arguments.""" + + log_args = parser.add_argument_group('logging', 'logging arguments') + log_args.add_argument('--logs-dir', + type=os.path.realpath, + help=('Directory to write logs to.')) + + +def get_component_uri(package: str) -> str: + """Retrieve the URI for a package.""" + return f'fuchsia-pkg://{REPO_ALIAS}/{package}#meta/{package}.cm' + + +def resolve_packages(packages: List[str], target_id: Optional[str]) -> None: + """Ensure that all |packages| are installed on a device.""" + + ssh_prefix = get_ssh_prefix(get_ssh_address(target_id)) + subprocess.run(ssh_prefix + ['--', 'pkgctl', 'gc'], check=False) + + for package in packages: + resolve_cmd = [ + '--', 'pkgctl', 'resolve', + 'fuchsia-pkg://%s/%s' % (REPO_ALIAS, package) + ] + retry_command(ssh_prefix + resolve_cmd) + + +def retry_command(cmd: List[str], retries: int = 2, + **kwargs) -> Optional[subprocess.CompletedProcess]: + """Helper function for retrying a subprocess.run command.""" + + for i in range(retries): + if i == retries - 1: + proc = subprocess.run(cmd, **kwargs, check=True) + return proc + proc = subprocess.run(cmd, **kwargs, check=False) + if proc.returncode == 0: + return proc + time.sleep(3) + return None + + +def get_ssh_address(target_id: Optional[str]) -> str: + """Determines the SSH address for the given target.""" + return run_ffx_command(('target', 'get-ssh-address'), + target_id, + capture_output=True).stdout.strip() + + +def find_in_dir(target_name: str, parent_dir: str) -> Optional[str]: + """Finds path in SDK. + + Args: + target_name: Name of target to find, as a string. + parent_dir: Directory to start search in. + + Returns: + Full path to the target, None if not found. + """ + # Doesn't make sense to look for a full path. Only extract the basename. + target_name = os.path.basename(target_name) + for root, dirs, _ in os.walk(parent_dir): + if target_name in dirs: + return os.path.abspath(os.path.join(root, target_name)) + + return None + + +def find_image_in_sdk(product_name: str) -> Optional[str]: + """Finds image dir in SDK for the given product. + + Args: + product_name: Name of product's image directory to find. + + Returns: + Full path to the target, None if not found. + """ + top_image_dir = os.path.join(SDK_ROOT, os.pardir, 'images') + path = find_in_dir(product_name, parent_dir=top_image_dir) + if path: + return find_in_dir('images', parent_dir=path) + return path + + +def catch_sigterm() -> None: + """Catches the kill signal and allows the process to exit cleanly.""" + def _sigterm_handler(*_): + sys.exit(0) + + signal.signal(signal.SIGTERM, _sigterm_handler) + + +def get_system_info(target: Optional[str] = None) -> Tuple[str, str]: + """Retrieves the installed OS version from the device. + + Returns: + Tuple of strings containing (product, version number), or a pair of + empty strings to indicate an error. + """ + info_cmd = run_ffx_command(('target', 'show', '--json'), + target_id=target, + capture_output=True, + check=False) + if info_cmd.returncode == 0: + info_json = json.loads(info_cmd.stdout.strip()) + for info in info_json: + if info['title'] == 'Build': + return (info['child'][1]['value'], info['child'][0]['value']) + + # If the information was not retrieved, return empty strings to indicate + # unknown system info.
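retry_command above gives flaky device commands a budget: every attempt but the last runs with check=False, and the final one runs with check=True so the caller sees the real CalledProcessError. A usage sketch (the host name and config path are illustrative):

from common import retry_command

# Two quiet attempts, 3 seconds apart, then a final attempt that raises
# on failure.
retry_command(['ssh', '-F', 'sshconfig', 'fuchsia-device', '--',
               'pkgctl', 'gc'],
              retries=3,
              capture_output=True)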
+ return ('', '') + + +def boot_device(target_id: Optional[str], + mode: BootMode, + serial_num: Optional[str] = None, + must_boot: bool = False) -> None: + """Boot device into desired mode, with fallback to SSH on failure. + + Args: + target_id: Optional target_id of device. + mode: Desired boot mode. + must_boot: Forces device to boot, regardless of current state. + Raises: + StateTransitionError: When final state of device is not desired. + """ + # Skip boot call if already in the state and not skipping check. + state = get_target_state(target_id, serial_num, num_attempts=3) + wanted_state = _BOOTMODE_TO_STATE.get(mode) + if not must_boot: + logging.debug('Current state %s. Want state %s', str(state), + str(wanted_state)) + must_boot = state != wanted_state + + if not must_boot: + logging.debug('Skipping boot - already in good state') + return + + def _reboot(reboot_cmd, current_state: TargetState): + reboot_cmd() + local_state = None + # Check that we transition out of current state. + for _ in range(30): + try: + local_state = get_target_state(target_id, serial_num) + if local_state != current_state: + # Changed states - can continue + break + except StateNotFoundError: + logging.debug('Device disconnected...') + if current_state != TargetState.DISCONNECTED: + # Changed states - can continue + break + finally: + time.sleep(2) + else: + logging.warning( + 'Device did not change from initial state. Exiting early') + return local_state or TargetState.DISCONNECTED + + # Now we want to transition to the new state. + for _ in range(90): + try: + local_state = get_target_state(target_id, serial_num) + if local_state == wanted_state: + return local_state + except StateNotFoundError: + logging.warning('Could not find target state.' + ' Sleeping then retrying...') + finally: + time.sleep(2) + return local_state or TargetState.DISCONNECTED + + state = _reboot( + (lambda: _boot_device_ffx(target_id, serial_num, state, mode)), state) + + if state == TargetState.DISCONNECTED: + raise StateNotFoundError('Target could not be found!') + + if state == wanted_state: + return + + logging.warning( + 'Booting with FFX to %s did not succeed. Attempting with DM', mode) + + # Fallback to SSH, with no retry if we tried with ffx.: + state = _reboot( + (lambda: _boot_device_dm(target_id, serial_num, state, mode)), state) + + if state != wanted_state: + raise StateTransitionError( + f'Could not get device to desired state. Wanted {wanted_state},' + f' got {state}') + logging.debug('Got desired state: %s', state) + + +def _boot_device_ffx(target_id: Optional[str], serial_num: Optional[str], + current_state: TargetState, mode: BootMode): + cmd = ['target', 'reboot'] + if mode == BootMode.REGULAR: + logging.info('Triggering regular boot') + elif mode == BootMode.RECOVERY: + cmd.append('-r') + elif mode == BootMode.BOOTLOADER: + cmd.append('-b') + else: + raise NotImplementedError(f'BootMode {mode} not supported') + + logging.debug('FFX reboot with command [%s]', ' '.join(cmd)) + if current_state == TargetState.FASTBOOT: + + run_ffx_command(cmd, + configs=[_ENABLE_ZEDBOOT], + target_id=serial_num, + check=False) + else: + run_ffx_command(cmd, + configs=[_ENABLE_ZEDBOOT], + target_id=target_id, + check=False) + + +def _boot_device_dm(target_id: Optional[str], serial_num: Optional[str], + current_state: TargetState, mode: BootMode): + # Can only use DM if device is in regular boot. 
+ if current_state != TargetState.PRODUCT: + if mode == BootMode.REGULAR: + raise StateTransitionError('Cannot boot to Regular via DM - ' + 'FFX already failed to do so.') + # Boot to regular. + _boot_device_ffx(target_id, serial_num, current_state, + BootMode.REGULAR) + + ssh_prefix = get_ssh_prefix(get_ssh_address(target_id)) + + reboot_cmd = None + + if mode == BootMode.REGULAR: + reboot_cmd = 'reboot' + elif mode == BootMode.RECOVERY: + reboot_cmd = 'reboot-recovery' + elif mode == BootMode.BOOTLOADER: + reboot_cmd = 'reboot-bootloader' + else: + raise NotImplementedError(f'BootMode {mode} not supported') + + # Boot commands can fail due to SSH connections timeout. + full_cmd = ssh_prefix + ['--', 'dm', reboot_cmd] + logging.debug('DM reboot with command [%s]', ' '.join(full_cmd)) + subprocess.run(full_cmd, check=False) diff --git a/build/fuchsia/test/common_unittests.py b/build/fuchsia/test/common_unittests.py new file mode 100755 index 000000000000..4e419c902574 --- /dev/null +++ b/build/fuchsia/test/common_unittests.py @@ -0,0 +1,54 @@ +#!/usr/bin/env vpython3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""File for testing common.py.""" + +import os +import tempfile +import unittest +import unittest.mock as mock + +import common + + +@unittest.skipIf(os.name == 'nt', 'Fuchsia tests not supported on Windows') +class CommonTest(unittest.TestCase): + """Test common.py methods.""" + def test_find_in_dir_returns_file_or_dir_if_searching(self) -> None: + """Test |find_in_dir| returns files if searching for file, or None.""" + # Make the directory structure. + with tempfile.TemporaryDirectory() as tmp_dir: + with tempfile.NamedTemporaryFile(dir=tmp_dir) as tmp_file, \ + tempfile.TemporaryDirectory(dir=tmp_dir) as inner_tmp_dir: + + # Structure is now: + # temp_dir/ + # temp_dir/inner_dir1 + # temp_dir/tempfile1 + # File is not a dir, so returns None. + self.assertIsNone( + common.find_in_dir(os.path.basename(tmp_file.name), + parent_dir=tmp_dir)) + + # Repeat for directory. + self.assertEqual( + common.find_in_dir(inner_tmp_dir, parent_dir=tmp_dir), + inner_tmp_dir) + + def test_find_image_in_sdk_searches_images_in_product_bundle(self): + """Test |find_image_in_sdk| searches for 'images' if product-bundle.""" + with tempfile.TemporaryDirectory() as tmp_dir: + os.makedirs(os.path.join(tmp_dir, 'sdk'), exist_ok=True) + os.makedirs(os.path.join(tmp_dir, 'images', 'workstation-product', + 'images'), + exist_ok=True) + with mock.patch('common.SDK_ROOT', os.path.join(tmp_dir, 'sdk')): + self.assertEqual( + common.find_image_in_sdk('workstation-product'), + os.path.join(tmp_dir, 'images', 'workstation-product', + 'images')) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/fuchsia/test/compatible_utils.py b/build/fuchsia/test/compatible_utils.py new file mode 100644 index 000000000000..b917a656d785 --- /dev/null +++ b/build/fuchsia/test/compatible_utils.py @@ -0,0 +1,207 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
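boot_device() thus tries ffx first and falls back to `dm` over SSH, each time verifying that the target actually left its old state and reached the wanted one. A typical call, assuming a known node name (illustrative; serial_num is only needed for fastboot targets):

from common import BootMode, boot_device

# Force a reboot into recovery (zedboot) even if the target already
# reports the wanted state; raises StateTransitionError on failure.
boot_device('fuchsia-emulator', BootMode.RECOVERY, must_boot=True)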
+"""Functions used in both v1 and v2 scripts.""" + +import os +import platform +import re +import stat +import subprocess + +from typing import Iterable, List, Optional, Tuple + + +# File indicating version of an image downloaded to the host +_BUILD_ARGS = "buildargs.gn" +_ARGS_FILE = 'args.gn' + +_FILTER_DIR = 'testing/buildbot/filters' +_SSH_KEYS = os.path.expanduser('~/.ssh/fuchsia_authorized_keys') + + +class VersionNotFoundError(Exception): + """Thrown when version info cannot be retrieved from device.""" + + +def get_ssh_keys() -> str: + """Returns path of Fuchsia ssh keys.""" + + return _SSH_KEYS + + +def running_unattended() -> bool: + """Returns true if running non-interactively. + + When running unattended, confirmation prompts and the like are suppressed. + """ + + # TODO(crbug/1401387): Change to mixin based approach. + return 'SWARMING_SERVER' in os.environ + + +def get_host_arch() -> str: + """Retrieve CPU architecture of the host machine. """ + host_arch = platform.machine() + # platform.machine() returns AMD64 on 64-bit Windows. + if host_arch in ['x86_64', 'AMD64']: + return 'x64' + if host_arch in ['aarch64', 'arm64']: + return 'arm64' + raise NotImplementedError('Unsupported host architecture: %s' % host_arch) + + +def add_exec_to_file(file: str) -> None: + """Add execution bits to a file. + + Args: + file: path to the file. + """ + file_stat = os.stat(file) + os.chmod(file, file_stat.st_mode | stat.S_IXUSR) + + +def _add_exec_to_pave_binaries(system_image_dir: str): + """Add exec to required pave files. + + The pave files may vary depending if a product-bundle or a prebuilt images + directory is being used. + Args: + system_image_dir: string path to the directory containing the pave files. + """ + pb_files = [ + 'pave.sh', + os.path.join(f'host_{get_host_arch()}', 'bootserver') + ] + image_files = [ + 'pave.sh', + os.path.join(f'bootserver.exe.linux-{get_host_arch()}') + ] + use_pb_files = os.path.exists(os.path.join(system_image_dir, pb_files[1])) + for f in pb_files if use_pb_files else image_files: + add_exec_to_file(os.path.join(system_image_dir, f)) + + +def pave(image_dir: str, target_id: Optional[str])\ + -> subprocess.CompletedProcess: + """"Pave a device using the pave script inside |image_dir|.""" + _add_exec_to_pave_binaries(image_dir) + pave_command = [ + os.path.join(image_dir, 'pave.sh'), '--authorized-keys', + get_ssh_keys(), '-1' + ] + if target_id: + pave_command.extend(['-n', target_id]) + return subprocess.run(pave_command, check=True, text=True, timeout=300) + + +def parse_host_port(host_port_pair: str) -> Tuple[str, int]: + """Parses a host name or IP address and a port number from a string of + any of the following forms: + - hostname:port + - IPv4addy:port + - [IPv6addy]:port + + Returns: + A tuple of the string host name/address and integer port number. + + Raises: + ValueError if `host_port_pair` does not contain a colon or if the + substring following the last colon cannot be converted to an int. + """ + + host, port = host_port_pair.rsplit(':', 1) + + # Strip the brackets if the host looks like an IPv6 address. 
+ if len(host) >= 4 and host[0] == '[' and host[-1] == ']': + host = host[1:-1] + return (host, int(port)) + + +def get_ssh_prefix(host_port_pair: str) -> List[str]: + """Get the prefix of a barebone ssh command.""" + + ssh_addr, ssh_port = parse_host_port(host_port_pair) + return [ + 'ssh', '-F', + os.path.expanduser('~/.fuchsia/sshconfig'), ssh_addr, '-p', + str(ssh_port) + ] + + +def install_symbols(package_paths: Iterable[str], + fuchsia_out_dir: str) -> None: + """Installs debug symbols for a package into the GDB-standard symbol + directory located in fuchsia_out_dir.""" + + symbol_root = os.path.join(fuchsia_out_dir, '.build-id') + for path in package_paths: + package_dir = os.path.dirname(path) + ids_txt_path = os.path.join(package_dir, 'ids.txt') + with open(ids_txt_path, 'r') as f: + for entry in f: + build_id, binary_relpath = entry.strip().split(' ') + binary_abspath = os.path.abspath( + os.path.join(package_dir, binary_relpath)) + symbol_dir = os.path.join(symbol_root, build_id[:2]) + symbol_file = os.path.join(symbol_dir, build_id[2:] + '.debug') + if not os.path.exists(symbol_dir): + os.makedirs(symbol_dir) + + if os.path.islink(symbol_file) or os.path.exists(symbol_file): + # Clobber the existing entry to ensure that the symlink's + # target is up to date. + os.unlink(symbol_file) + os.symlink(os.path.relpath(binary_abspath, symbol_dir), + symbol_file) + + +# TODO(crbug.com/1279803): Until one can send files to the device when running +# a test, filter files must be read from the test package. +def map_filter_file_to_package_file(filter_file: str) -> str: + """Returns the path to |filter_file| within the test component's package.""" + + if not _FILTER_DIR in filter_file: + raise ValueError('CFv2 tests only support registered filter files ' + 'present in the test package') + return '/pkg/' + filter_file[filter_file.index(_FILTER_DIR):] + + +def get_sdk_hash(system_image_dir: str) -> Tuple[str, str]: + """Read version of hash in pre-installed package directory. + Returns: + Tuple of (product, version) of image to be installed. + Raises: + VersionNotFoundError: if contents of buildargs.gn cannot be found or the + version number cannot be extracted. + """ + + # TODO(crbug.com/1261961): Stop processing buildargs.gn directly. + args_file = os.path.join(system_image_dir, _BUILD_ARGS) + if not os.path.exists(args_file): + args_file = os.path.join(system_image_dir, _ARGS_FILE) + + if not os.path.exists(args_file): + raise VersionNotFoundError( + f'Dir {system_image_dir} did not contain {_BUILD_ARGS} or ' + f'{_ARGS_FILE}') + + with open(args_file) as f: + contents = f.readlines() + if not contents: + raise VersionNotFoundError('Could not retrieve %s' % args_file) + version_key = 'build_info_version' + product_key = 'build_info_product' + info_keys = [product_key, version_key] + version_info = {} + for line in contents: + for key in info_keys: + match = re.match(r'%s = "(.*)"' % key, line) + if match: + version_info[key] = match.group(1) + if not (version_key in version_info and product_key in version_info): + raise VersionNotFoundError( + 'Could not extract version info from %s. 
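parse_host_port and get_ssh_prefix together turn the host:port string reported by `ffx target get-ssh-address` into a reusable argv prefix; anything appended after '--' then runs on the device. For example (the address is illustrative; the returned prefix references ~/.fuchsia/sshconfig, expanded at call time):

import subprocess

from compatible_utils import get_ssh_prefix

ssh_prefix = get_ssh_prefix('[fe80::e1c4:fd22:5ee5:878e]:22222')
subprocess.run(ssh_prefix + ['--', 'echo', 'hello'], check=True)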
Contents: %s' % + (args_file, contents)) + + return (version_info[product_key], version_info[version_key]) diff --git a/build/fuchsia/test/compatible_utils_unittests.py b/build/fuchsia/test/compatible_utils_unittests.py new file mode 100755 index 000000000000..02815921c2cb --- /dev/null +++ b/build/fuchsia/test/compatible_utils_unittests.py @@ -0,0 +1,238 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""File for testing compatible_utils.py.""" + +import io +import os +import stat +import tempfile +import unittest +import unittest.mock as mock + +import compatible_utils + + +@unittest.skipIf(os.name == 'nt', 'Fuchsia tests not supported on Windows') +class CompatibleUtilsTest(unittest.TestCase): + """Test compatible_utils.py methods.""" + + def test_running_unattended_returns_true_if_headless_set(self) -> None: + """Test |running_unattended| returns True if CHROME_HEADLESS is set.""" + with mock.patch('os.environ', {'SWARMING_SERVER': 0}): + self.assertTrue(compatible_utils.running_unattended()) + + with mock.patch('os.environ', {'FOO_HEADLESS': 0}): + self.assertFalse(compatible_utils.running_unattended()) + + def test_get_host_arch(self) -> None: + """Test |get_host_arch| gets the host architecture and throws + exceptions on errors.""" + supported_arches = ['x86_64', 'AMD64', 'aarch64'] + with mock.patch('platform.machine', side_effect=supported_arches): + self.assertEqual(compatible_utils.get_host_arch(), 'x64') + self.assertEqual(compatible_utils.get_host_arch(), 'x64') + self.assertEqual(compatible_utils.get_host_arch(), 'arm64') + + with mock.patch('platform.machine', return_value=['fake-arch']), \ + self.assertRaises(NotImplementedError): + compatible_utils.get_host_arch() + + def test_add_exec_to_file(self) -> None: + """Test |add_exec_to_file| adds executable bit to file.""" + with tempfile.NamedTemporaryFile() as f: + original_stat = os.stat(f.name).st_mode + self.assertFalse(original_stat & stat.S_IXUSR) + + compatible_utils.add_exec_to_file(f.name) + + new_stat = os.stat(f.name).st_mode + self.assertTrue(new_stat & stat.S_IXUSR) + + # pylint: disable=no-self-use + def test_pave_adds_exec_to_binary_files(self) -> None: + """Test |pave| calls |add_exec_to_file| on necessary files.""" + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('compatible_utils.add_exec_to_file') as mock_exec, \ + mock.patch('platform.machine', return_value='x86_64'), \ + mock.patch('subprocess.run'): + compatible_utils.pave('some/path/to/dir', 'some-target') + + mock_exec.assert_has_calls([ + mock.call('some/path/to/dir/pave.sh'), + mock.call('some/path/to/dir/host_x64/bootserver') + ], + any_order=True) + + def test_pave_adds_exec_to_binary_files_if_pb_set_not_found(self) -> None: + """Test |pave| calls |add_exec_to_file| on necessary files. + + Checks if current product-bundle files exist. If not, defaults to + prebuilt-images set. 
+ """ + with mock.patch('os.path.exists', return_value=False), \ + mock.patch('compatible_utils.add_exec_to_file') as mock_exec, \ + mock.patch('platform.machine', return_value='x86_64'), \ + mock.patch('subprocess.run'): + compatible_utils.pave('some/path/to/dir', 'some-target') + + mock_exec.assert_has_calls([ + mock.call('some/path/to/dir/pave.sh'), + mock.call('some/path/to/dir/bootserver.exe.linux-x64') + ], + any_order=True) + + def test_pave_adds_target_id_if_given(self) -> None: + """Test |pave| adds target-id to the arguments.""" + with mock.patch('os.path.exists', return_value=False), \ + mock.patch('compatible_utils.add_exec_to_file'), \ + mock.patch('platform.machine', return_value='x86_64'), \ + mock.patch('compatible_utils.get_ssh_keys', + return_value='authorized-keys-file'), \ + mock.patch('subprocess.run') as mock_subproc: + mock_subproc.reset_mock() + compatible_utils.pave('some/path/to/dir', 'some-target') + + mock_subproc.assert_called_once_with([ + 'some/path/to/dir/pave.sh', '--authorized-keys', + 'authorized-keys-file', '-1', '-n', 'some-target' + ], + check=True, + text=True, + timeout=300) + + # pylint: disable=no-self-use + + def test_parse_host_port_splits_address_and_strips_brackets(self) -> None: + """Test |parse_host_port| splits ipv4 and ipv6 addresses correctly.""" + self.assertEqual(compatible_utils.parse_host_port('hostname:55'), + ('hostname', 55)) + self.assertEqual(compatible_utils.parse_host_port('192.168.42.40:443'), + ('192.168.42.40', 443)) + self.assertEqual( + compatible_utils.parse_host_port('[2001:db8::1]:8080'), + ('2001:db8::1', 8080)) + + def test_map_filter_filter_file_throws_value_error_if_wrong_path(self + ) -> None: + """Test |map_filter_file| throws ValueError if path is missing + FILTER_DIR.""" + with self.assertRaises(ValueError): + compatible_utils.map_filter_file_to_package_file('foo') + + with self.assertRaises(ValueError): + compatible_utils.map_filter_file_to_package_file('some/other/path') + + with self.assertRaises(ValueError): + compatible_utils.map_filter_file_to_package_file('filters/file') + + # No error. + compatible_utils.map_filter_file_to_package_file( + 'testing/buildbot/filters/some.filter') + + def test_map_filter_filter_replaces_filter_dir_with_pkg_path(self) -> None: + """Test |map_filter_file| throws ValueError if path is missing + FILTER_DIR.""" + self.assertEqual( + '/pkg/testing/buildbot/filters/some.filter', + compatible_utils.map_filter_file_to_package_file( + 'foo/testing/buildbot/filters/some.filter')) + + def test_get_sdk_hash_fallsback_to_args_file_if_buildargs_dne(self + ) -> None: + """Test |get_sdk_hash| checks if buildargs.gn exists. + + If it does not, fallsback to args.gn. This should raise an exception + as it does not exist. 
+ """ + with mock.patch('os.path.exists', return_value=False) as mock_exists, \ + self.assertRaises(compatible_utils.VersionNotFoundError): + compatible_utils.get_sdk_hash('some/image/dir') + mock_exists.assert_has_calls([ + mock.call('some/image/dir/buildargs.gn'), + mock.call('some/image/dir/args.gn') + ]) + + def test_get_sdk_hash_parse_contents_of_args_file(self) -> None: + """Test |get_sdk_hash| parses buildargs contents correctly.""" + build_args_test_contents = """ +build_info_board = "chromebook-x64" +build_info_product = "workstation_eng" +build_info_version = "10.20221114.2.1" +universe_package_labels += [] +""" + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('builtins.open', + return_value=io.StringIO(build_args_test_contents)): + self.assertEqual(compatible_utils.get_sdk_hash('some/dir'), + ('workstation_eng', '10.20221114.2.1')) + + def test_get_sdk_hash_raises_error_if_keys_missing(self) -> None: + """Test |get_sdk_hash| raises VersionNotFoundError if missing keys""" + build_args_test_contents = """ +import("//boards/chromebook-x64.gni") +import("//products/workstation_eng.gni") +cxx_rbe_enable = true +host_labels += [ "//bundles/infra/build" ] +universe_package_labels += [] +""" + with mock.patch('os.path.exists', return_value=True), \ + mock.patch( + 'builtins.open', + return_value=io.StringIO(build_args_test_contents)), \ + self.assertRaises(compatible_utils.VersionNotFoundError): + compatible_utils.get_sdk_hash('some/dir') + + def test_get_sdk_hash_raises_error_if_contents_empty(self) -> None: + """Test |get_sdk_hash| raises VersionNotFoundError if no contents.""" + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('builtins.open', return_value=io.StringIO("")), \ + self.assertRaises(compatible_utils.VersionNotFoundError): + compatible_utils.get_sdk_hash('some/dir') + + def trim_noop_prefixes(self, path): + """Helper function to trim no-op path name prefixes that are + introduced by os.path.realpath on some platforms. These break + the unit tests, but have no actual effect on behavior.""" + # These must all end in the path separator character for the + # string length computation to be correct on all platforms. 
+        noop_prefixes = ['/private/']
+        for prefix in noop_prefixes:
+            if path.startswith(prefix):
+                return path[len(prefix) - 1:]
+        return path
+
+    def test_install_symbols(self):
+        """Test |install_symbols|."""
+
+        with tempfile.TemporaryDirectory() as fuchsia_out_dir:
+            build_id = 'test_build_id'
+            symbol_file = os.path.join(fuchsia_out_dir, '.build-id',
+                                       build_id[:2], build_id[2:] + '.debug')
+            id_path = os.path.join(fuchsia_out_dir, 'ids.txt')
+            try:
+                binary_relpath = 'path/to/binary'
+                with open(id_path, 'w') as f:
+                    f.write(f'{build_id} {binary_relpath}')
+                compatible_utils.install_symbols([id_path], fuchsia_out_dir)
+                self.assertTrue(os.path.islink(symbol_file))
+                self.assertEqual(
+                    self.trim_noop_prefixes(os.path.realpath(symbol_file)),
+                    os.path.join(fuchsia_out_dir, binary_relpath))
+
+                new_binary_relpath = 'path/to/new/binary'
+                with open(id_path, 'w') as f:
+                    f.write(f'{build_id} {new_binary_relpath}')
+                compatible_utils.install_symbols([id_path], fuchsia_out_dir)
+                self.assertTrue(os.path.islink(symbol_file))
+                self.assertEqual(
+                    self.trim_noop_prefixes(os.path.realpath(symbol_file)),
+                    os.path.join(fuchsia_out_dir, new_binary_relpath))
+            finally:
+                os.remove(id_path)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/build/fuchsia/test/coveragetest.py b/build/fuchsia/test/coveragetest.py
new file mode 100755
index 000000000000..3a82e53c2902
--- /dev/null
+++ b/build/fuchsia/test/coveragetest.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env vpython3
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Ensure files in the directory are thoroughly tested."""
+
+import importlib
+import io
+import os
+import sys
+import unittest
+
+import coverage  # pylint: disable=import-error
+
+# These files need to have sufficient coverage.
+COVERED_FILES = [
+    'compatible_utils.py', 'deploy_to_fuchsia.py', 'flash_device.py',
+    'log_manager.py', 'publish_package.py', 'serve_repo.py', 'test_server.py'
+]
+
+# These files are tested without coverage requirements.
+TESTED_FILES = ['common.py', 'ffx_emulator.py']
+
+
+def main():
+    """Gather coverage data, ensure included files are 100% covered."""
+
+    # Fuchsia tests are not supported on Windows.
+    if os.name == 'nt':
+        return 0
+
+    cov = coverage.coverage(data_file=None,
+                            include=COVERED_FILES,
+                            config_file=True)
+    cov.start()
+
+    for file in COVERED_FILES + TESTED_FILES:
+        print('Testing ' + file + ' ...')
+        # pylint: disable=import-outside-toplevel
+        # Import tests after coverage starts so that definition lines are
+        # also covered.
+        module = importlib.import_module(file.replace('.py', '_unittests'))
+        # pylint: enable=import-outside-toplevel
+
+        tests = unittest.TestLoader().loadTestsFromModule(module)
+        if not unittest.TextTestRunner().run(tests).wasSuccessful():
+            return 1
+
+    cov.stop()
+    outf = io.StringIO()
+    percentage = cov.report(file=outf, show_missing=True)
+    if int(percentage) != 100:
+        print(outf.getvalue())
+        print('FATAL: Insufficient coverage (%.f%%)' % int(percentage))
+        return 1
+    return 0
+
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/build/fuchsia/test/deploy_to_fuchsia.py b/build/fuchsia/test/deploy_to_fuchsia.py
new file mode 100755
index 000000000000..41b92aac07d6
--- /dev/null
+++ b/build/fuchsia/test/deploy_to_fuchsia.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env vpython3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""A script for deploying Chrome binaries to a Fuchsia checkout.""" + +import argparse +import os +import sys + +from common import read_package_paths, register_common_args +from compatible_utils import install_symbols +from publish_package import publish_packages + + +def register_fuchsia_args(parser: argparse.ArgumentParser) -> None: + """Register common arguments for deploying to Fuchsia.""" + + fuchsia_args = parser.add_argument_group( + 'fuchsia', 'Arguments for working with Fuchsia checkout.') + fuchsia_args.add_argument('--fuchsia-out-dir', + help='Path to output directory of a local ' + 'Fuchsia checkout.') + + +def main(): + """Stand-alone program for deploying to the output directory of a local + Fuchsia checkout.""" + + parser = argparse.ArgumentParser() + parser.add_argument('package', help='The package to deploy to Fuchsia.') + register_common_args(parser) + register_fuchsia_args(parser) + args = parser.parse_args() + + fuchsia_out_dir = os.path.expanduser(args.fuchsia_out_dir) + package_paths = read_package_paths(args.out_dir, args.package) + publish_packages(package_paths, os.path.join(fuchsia_out_dir, + 'amber-files')) + install_symbols(package_paths, fuchsia_out_dir) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/fuchsia/test/deploy_to_fuchsia_unittests.py b/build/fuchsia/test/deploy_to_fuchsia_unittests.py new file mode 100755 index 000000000000..7635b46297ca --- /dev/null +++ b/build/fuchsia/test/deploy_to_fuchsia_unittests.py @@ -0,0 +1,38 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""File for testing deploy_to_fuchsia.py.""" + +import os +import unittest +import unittest.mock as mock + +import deploy_to_fuchsia + + +class DeployToFuchsiaTest(unittest.TestCase): + """Unittests for deploy_to_fuchsia.py.""" + + @mock.patch('deploy_to_fuchsia.read_package_paths', return_value=[]) + @mock.patch('deploy_to_fuchsia.publish_packages') + @mock.patch('deploy_to_fuchsia.install_symbols') + def test_main(self, mock_install, mock_publish, mock_read) -> None: + """Tests |main|.""" + + test_package = 'test_package' + fuchsia_out_dir = 'out/fuchsia' + with mock.patch('sys.argv', [ + 'deploy_to_fuchsia.py', test_package, '-C', 'out/chromium', + '--fuchsia-out-dir', fuchsia_out_dir + ]): + deploy_to_fuchsia.main() + self.assertEqual(mock_read.call_args_list[0][0][1], test_package) + self.assertEqual(mock_publish.call_args_list[0][0][1], + os.path.join(fuchsia_out_dir, 'amber-files')) + self.assertEqual(mock_install.call_args_list[0][0][1], + fuchsia_out_dir) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/fuchsia/test/ermine_ctl.py b/build/fuchsia/test/ermine_ctl.py new file mode 100644 index 000000000000..66253891a2b6 --- /dev/null +++ b/build/fuchsia/test/ermine_ctl.py @@ -0,0 +1,25 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Defines erminctl interface compatible with modern scripts.""" + +import subprocess +from typing import List + +from compatible_utils import get_ssh_prefix +from common import get_ssh_address +import base_ermine_ctl + + +class ErmineCtl(base_ermine_ctl.BaseErmineCtl): + """ErmineCtl adaptation for modern scripts.""" + + def __init__(self, target_id: str): + super().__init__() + self._ssh_prefix = get_ssh_prefix(get_ssh_address(target_id)) + + def execute_command_async(self, args: List[str]) -> subprocess.Popen: + return subprocess.Popen(self._ssh_prefix + args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + encoding='utf-8') diff --git a/build/fuchsia/test/ffx_emulator.py b/build/fuchsia/test/ffx_emulator.py new file mode 100644 index 000000000000..be473ccb920d --- /dev/null +++ b/build/fuchsia/test/ffx_emulator.py @@ -0,0 +1,162 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Provide helpers for running Fuchsia's `ffx emu`.""" + +import argparse +import ast +import logging +import os +import json +import random +import subprocess + +from contextlib import AbstractContextManager + +from common import check_ssh_config_file, find_image_in_sdk, get_system_info, \ + run_ffx_command, SDK_ROOT +from compatible_utils import get_host_arch, get_sdk_hash + +_EMU_COMMAND_RETRIES = 3 + + +class FfxEmulator(AbstractContextManager): + """A helper for managing emulators.""" + def __init__(self, args: argparse.Namespace) -> None: + if args.product_bundle: + self._product_bundle = args.product_bundle + else: + self._product_bundle = 'terminal.qemu-' + get_host_arch() + + self._enable_graphics = args.enable_graphics + self._hardware_gpu = args.hardware_gpu + self._logs_dir = args.logs_dir + self._with_network = args.with_network + if args.everlasting: + # Do not change the name, it will break the logic. + # ffx has a prefix-matching logic, so 'fuchsia-emulator' is not + # usable to avoid breaking local development workflow. I.e. + # developers can create an everlasting emulator and an ephemeral one + # without interfering each other. + self._node_name = 'fuchsia-everlasting-emulator' + assert self._everlasting() + else: + self._node_name = 'fuchsia-emulator-' + str(random.randint( + 1, 9999)) + + # Set the download path parallel to Fuchsia SDK directory + # permanently so that scripts can always find the product bundles. + run_ffx_command(('config', 'set', 'pbms.storage.path', + os.path.join(SDK_ROOT, os.pardir, 'images'))) + + def _everlasting(self) -> bool: + return self._node_name == 'fuchsia-everlasting-emulator' + + def _start_emulator(self) -> None: + """Start the emulator.""" + logging.info('Starting emulator %s', self._node_name) + check_ssh_config_file() + emu_command = [ + 'emu', 'start', self._product_bundle, '--name', self._node_name + ] + if not self._enable_graphics: + emu_command.append('-H') + if self._hardware_gpu: + emu_command.append('--gpu') + if self._logs_dir: + emu_command.extend( + ('-l', os.path.join(self._logs_dir, 'emulator_log'))) + if self._with_network: + emu_command.extend(('--net', 'tap')) + + # TODO(https://crbug.com/1336776): remove when ffx has native support + # for starting emulator on arm64 host. + if get_host_arch() == 'arm64': + + arm64_qemu_dir = os.path.join(SDK_ROOT, 'tools', 'arm64', + 'qemu_internal') + + # The arm64 emulator binaries are downloaded separately, so add + # a symlink to the expected location inside the SDK. 
+ if not os.path.isdir(arm64_qemu_dir): + os.symlink( + os.path.join(SDK_ROOT, '..', '..', 'qemu-linux-arm64'), + arm64_qemu_dir) + + # Add the arm64 emulator binaries to the SDK's manifest.json file. + sdk_manifest = os.path.join(SDK_ROOT, 'meta', 'manifest.json') + with open(sdk_manifest, 'r+') as f: + data = json.load(f) + for part in data['parts']: + if part['meta'] == 'tools/x64/qemu_internal-meta.json': + part['meta'] = 'tools/arm64/qemu_internal-meta.json' + break + f.seek(0) + json.dump(data, f) + f.truncate() + + # Generate a meta file for the arm64 emulator binaries using its + # x64 counterpart. + qemu_arm64_meta_file = os.path.join(SDK_ROOT, 'tools', 'arm64', + 'qemu_internal-meta.json') + qemu_x64_meta_file = os.path.join(SDK_ROOT, 'tools', 'x64', + 'qemu_internal-meta.json') + with open(qemu_x64_meta_file) as f: + data = str(json.load(f)) + qemu_arm64_meta = data.replace(r'tools/x64', 'tools/arm64') + with open(qemu_arm64_meta_file, "w+") as f: + json.dump(ast.literal_eval(qemu_arm64_meta), f) + emu_command.extend(['--engine', 'qemu']) + + for i in range(_EMU_COMMAND_RETRIES): + + # If the ffx daemon fails to establish a connection with + # the emulator after 85 seconds, that means the emulator + # failed to be brought up and a retry is needed. + # TODO(fxb/103540): Remove retry when start up issue is fixed. + try: + # TODO(fxb/125872): Debug is added for examining flakiness. + configs = ['emu.start.timeout=90'] + if i > 0: + logging.warning( + 'Emulator failed to start. Turning on debug') + configs.append('log.level=debug') + run_ffx_command(emu_command, timeout=85, configs=configs) + break + except (subprocess.TimeoutExpired, subprocess.CalledProcessError): + run_ffx_command(('emu', 'stop')) + + def _shutdown_emulator(self) -> None: + """Shutdown the emulator.""" + + logging.info('Stopping the emulator %s', self._node_name) + # The emulator might have shut down unexpectedly, so this command + # might fail. + run_ffx_command(('emu', 'stop', self._node_name), check=False) + + def __enter__(self) -> str: + """Start the emulator if necessary. + + Returns: + The node name of the emulator. + """ + + if self._everlasting(): + sdk_hash = get_sdk_hash(find_image_in_sdk(self._product_bundle)) + sys_info = get_system_info(self._node_name) + if sdk_hash == sys_info: + return self._node_name + logging.info( + ('The emulator version [%s] does not match the SDK [%s], ' + 'updating...'), sys_info, sdk_hash) + + self._start_emulator() + return self._node_name + + def __exit__(self, exc_type, exc_value, traceback) -> bool: + """Shutdown the emulator if necessary.""" + + if not self._everlasting(): + self._shutdown_emulator() + # Do not suppress exceptions. + return False diff --git a/build/fuchsia/test/ffx_emulator_unittests.py b/build/fuchsia/test/ffx_emulator_unittests.py new file mode 100755 index 000000000000..e12f13aa9b83 --- /dev/null +++ b/build/fuchsia/test/ffx_emulator_unittests.py @@ -0,0 +1,49 @@ +#!/usr/bin/env vpython3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""File for testing ffx_emulator.py.""" + +import argparse +import unittest + +from ffx_emulator import FfxEmulator + + +class FfxEmulatorTest(unittest.TestCase): + """Unittests for ffx_emulator.py""" + def test_use_fixed_node_name(self) -> None: + """FfxEmulator should use a fixed node name.""" + # Allowing the test case to access FfxEmulator._node_name directly. 
+ # pylint: disable=protected-access + self.assertEqual( + FfxEmulator( + argparse.Namespace( + **{ + 'product_bundle': None, + 'enable_graphics': False, + 'hardware_gpu': False, + 'logs_dir': '.', + 'with_network': False, + 'everlasting': True + }))._node_name, 'fuchsia-everlasting-emulator') + + def test_use_random_node_name(self) -> None: + """FfxEmulator should not use a fixed node name.""" + # Allowing the test case to access FfxEmulator._node_name directly. + # pylint: disable=protected-access + self.assertNotEqual( + FfxEmulator( + argparse.Namespace( + **{ + 'product_bundle': None, + 'enable_graphics': False, + 'hardware_gpu': False, + 'logs_dir': '.', + 'with_network': False, + 'everlasting': False + }))._node_name, 'fuchsia-everlasting-emulator') + + +if __name__ == '__main__': + unittest.main() diff --git a/build/fuchsia/test/ffx_integration.py b/build/fuchsia/test/ffx_integration.py new file mode 100644 index 000000000000..9385e93f9fea --- /dev/null +++ b/build/fuchsia/test/ffx_integration.py @@ -0,0 +1,236 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Provide helpers for running Fuchsia's `ffx`.""" + +import logging +import os +import json +import subprocess +import sys +import tempfile + +from contextlib import AbstractContextManager +from typing import IO, Iterable, List, Optional + +from common import run_continuous_ffx_command, run_ffx_command, SDK_ROOT + +RUN_SUMMARY_SCHEMA = \ + 'https://fuchsia.dev/schema/ffx_test/run_summary-8d1dd964.json' + + +def get_config(name: str) -> Optional[str]: + """Run a ffx config get command to retrieve the config value.""" + + try: + return run_ffx_command(['config', 'get', name], + capture_output=True).stdout.strip() + except subprocess.CalledProcessError as cpe: + # A return code of 2 indicates no previous value set. + if cpe.returncode == 2: + return None + raise + + +class ScopedFfxConfig(AbstractContextManager): + """Temporarily overrides `ffx` configuration. Restores the previous value + upon exit.""" + + def __init__(self, name: str, value: str) -> None: + """ + Args: + name: The name of the property to set. + value: The value to associate with `name`. + """ + self._old_value = None + self._new_value = value + self._name = name + + def __enter__(self): + """Override the configuration.""" + + # Cache the old value. + self._old_value = get_config(self._name) + if self._new_value != self._old_value: + run_ffx_command(['config', 'set', self._name, self._new_value]) + return self + + def __exit__(self, exc_type, exc_val, exc_tb) -> bool: + if self._new_value == self._old_value: + return False + + # Allow removal of config to fail. + remove_cmd = run_ffx_command(['config', 'remove', self._name], + check=False) + if remove_cmd.returncode != 0: + logging.warning('Error when removing ffx config %s', self._name) + + # Explicitly set the value back only if removing the new value doesn't + # already restore the old value. + if self._old_value is not None and \ + self._old_value != get_config(self._name): + run_ffx_command(['config', 'set', self._name, self._old_value]) + + # Do not suppress exceptions. + return False + + +def test_connection(target_id: Optional[str]) -> None: + """Run an echo test to verify that the device can be connected to.""" + + run_ffx_command(('target', 'echo'), target_id) + + +class FfxTestRunner(AbstractContextManager): + """A context manager that manages a session for running a test via `ffx`. 
+ + Upon entry, an instance of this class configures `ffx` to retrieve files + generated by a test and prepares a directory to hold these files either in a + specified directory or in tmp. On exit, any previous configuration of + `ffx` is restored and the temporary directory, if used, is deleted. + + The prepared directory is used when invoking `ffx test run`. + """ + + def __init__(self, results_dir: Optional[str] = None) -> None: + """ + Args: + results_dir: Directory on the host where results should be stored. + """ + self._results_dir = results_dir + self._custom_artifact_directory = None + self._temp_results_dir = None + self._debug_data_directory = None + + def __enter__(self): + if self._results_dir: + os.makedirs(self._results_dir, exist_ok=True) + else: + self._temp_results_dir = tempfile.TemporaryDirectory() + self._results_dir = self._temp_results_dir.__enter__() + return self + + def __exit__(self, exc_type, exc_val, exc_tb) -> bool: + if self._temp_results_dir: + self._temp_results_dir.__exit__(exc_type, exc_val, exc_tb) + self._temp_results_dir = None + + # Do not suppress exceptions. + return False + + def run_test(self, + component_uri: str, + test_args: Optional[Iterable[str]] = None, + node_name: Optional[str] = None) -> subprocess.Popen: + """Starts a subprocess to run a test on a target. + Args: + component_uri: The test component URI. + test_args: Arguments to the test package, if any. + node_name: The target on which to run the test. + Returns: + A subprocess.Popen object. + """ + command = [ + 'test', 'run', '--output-directory', self._results_dir, + component_uri + ] + if test_args: + command.append('--') + command.extend(test_args) + return run_continuous_ffx_command(command, + node_name, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + + def _parse_test_outputs(self): + """Parses the output files generated by the test runner. + + The instance's `_custom_artifact_directory` member is set to the + directory holding output files emitted by the test. + + This function is idempotent, and performs no work if it has already been + called. + """ + if self._custom_artifact_directory: + return + + run_summary_path = os.path.join(self._results_dir, 'run_summary.json') + try: + with open(run_summary_path) as run_summary_file: + run_summary = json.load(run_summary_file) + except IOError: + logging.exception('Error reading run summary file.') + return + except ValueError: + logging.exception('Error parsing run summary file %s', + run_summary_path) + return + + assert run_summary['schema_id'] == RUN_SUMMARY_SCHEMA, \ + 'Unsupported version found in %s' % run_summary_path + + run_artifact_dir = run_summary.get('data', {})['artifact_dir'] + for artifact_path, artifact in run_summary.get( + 'data', {})['artifacts'].items(): + if artifact['artifact_type'] == 'DEBUG': + self._debug_data_directory = os.path.join( + self._results_dir, run_artifact_dir, artifact_path) + break + + if run_summary['data']['outcome'] == "NOT_STARTED": + logging.critical('Test execution was interrupted. Either the ' + 'emulator crashed while the tests were still ' + 'running or connection to the device was lost.') + sys.exit(1) + + # There should be precisely one suite for the test that ran. + suites_list = run_summary.get('data', {}).get('suites') + if not suites_list: + logging.error('Missing or empty list of suites in %s', + run_summary_path) + return + suite_summary = suites_list[0] + + # Get the top-level directory holding all artifacts for this suite. 
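+        # For reference, a simplified sketch of the run_summary.json shape
+        # this parser assumes (field names taken from the code around it):
+        #   {"schema_id": RUN_SUMMARY_SCHEMA,
+        #    "data": {"artifact_dir": ..., "artifacts": {...},
+        #             "outcome": ...,
+        #             "suites": [{"artifact_dir": ...,
+        #                         "artifacts": {...: {
+        #                             "artifact_type": "CUSTOM"}}}]}}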
+ artifact_dir = suite_summary.get('artifact_dir') + if not artifact_dir: + logging.error('Failed to find suite\'s artifact_dir in %s', + run_summary_path) + return + + # Get the path corresponding to artifacts + for artifact_path, artifact in suite_summary['artifacts'].items(): + if artifact['artifact_type'] == 'CUSTOM': + self._custom_artifact_directory = os.path.join( + self._results_dir, artifact_dir, artifact_path) + break + + def get_custom_artifact_directory(self) -> str: + """Returns the full path to the directory holding custom artifacts + emitted by the test or None if the directory could not be discovered. + """ + self._parse_test_outputs() + return self._custom_artifact_directory + + def get_debug_data_directory(self): + """Returns the full path to the directory holding debug data + emitted by the test, or None if the path cannot be determined. + """ + self._parse_test_outputs() + return self._debug_data_directory + + +def run_symbolizer(symbol_paths: List[str], input_fd: IO, + output_fd: IO) -> subprocess.Popen: + """Runs symbolizer that symbolizes |input| and outputs to |output|.""" + + symbolize_cmd = ([ + 'debug', 'symbolize', '--', '--omit-module-lines', '--build-id-dir', + os.path.join(SDK_ROOT, '.build-id') + ]) + for path in symbol_paths: + symbolize_cmd.extend(['--ids-txt', path]) + return run_continuous_ffx_command(symbolize_cmd, + stdin=input_fd, + stdout=output_fd, + stderr=subprocess.STDOUT) diff --git a/build/fuchsia/test/flash_device.py b/build/fuchsia/test/flash_device.py new file mode 100755 index 000000000000..291f6e90fb8e --- /dev/null +++ b/build/fuchsia/test/flash_device.py @@ -0,0 +1,243 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Implements commands for flashing a Fuchsia device.""" + +import argparse +import logging +import os +import subprocess +import sys +import time + +from typing import Optional, Tuple + +import common +from common import BootMode, boot_device, check_ssh_config_file, \ + get_system_info, find_image_in_sdk, register_device_args +from compatible_utils import get_sdk_hash, get_ssh_keys, pave, \ + running_unattended, add_exec_to_file, get_host_arch +from lockfile import lock + +# Flash-file lock. Used to restrict number of flash operations per host. +# File lock should be marked as stale after 15 mins. +_FF_LOCK = os.path.join('/tmp', 'flash.lock') +_FF_LOCK_STALE_SECS = 60 * 15 +_FF_LOCK_ACQ_TIMEOUT = _FF_LOCK_STALE_SECS + + +def _get_system_info(target: Optional[str], + serial_num: Optional[str]) -> Tuple[str, str]: + """Retrieves installed OS version from device. + + Args: + target: Target to get system info of. + serial_num: Serial number of device to get system info of. + Returns: + Tuple of strings, containing (product, version number). + """ + + # TODO(b/242191374): Remove when devices in swarming are no longer booted + # into zedboot. + if running_unattended(): + try: + boot_device(target, BootMode.REGULAR, serial_num) + except (subprocess.CalledProcessError, common.StateTransitionError): + logging.warning('Could not boot device. 
Assuming it is in Zedboot.')
+            return ('', '')
+        wait_cmd = common.run_ffx_command(('target', 'wait', '-t', '180'),
+                                          target,
+                                          check=False)
+        if wait_cmd.returncode != 0:
+            return ('', '')
+
+    return get_system_info(target)
+
+
+def update_required(
+        os_check,
+        system_image_dir: Optional[str],
+        target: Optional[str],
+        serial_num: Optional[str] = None) -> Tuple[bool, Optional[str]]:
+    """Returns True if a system update is required, plus the path to the
+    image directory."""
+
+    if os_check == 'ignore':
+        return False, system_image_dir
+    if not system_image_dir:
+        raise ValueError('System image directory must be specified.')
+    if not os.path.exists(system_image_dir):
+        logging.warning(
+            'System image directory does not exist. Assuming it\'s '
+            'a product-bundle name and dynamically searching for the '
+            'image directory.')
+        path = find_image_in_sdk(system_image_dir)
+        if not path:
+            raise FileNotFoundError(
+                f'System image directory {system_image_dir} could not '
+                'be found.')
+        system_image_dir = path
+    if (os_check == 'check'
+            and get_sdk_hash(system_image_dir) == _get_system_info(
+                target, serial_num)):
+        return False, system_image_dir
+    return True, system_image_dir
+
+
+def _add_exec_to_flash_binaries(system_image_dir: str) -> None:
+    """Add the exec bit to the required flash files.
+
+    The flash files vary depending on whether a product-bundle or a prebuilt
+    images directory is being used.
+    Args:
+        system_image_dir: string path to the directory containing the flash
+          files.
+    """
+    pb_files = [
+        'flash.sh',
+        os.path.join(f'host_{get_host_arch()}', 'fastboot')
+    ]
+    image_files = ['flash.sh', f'fastboot.exe.linux-{get_host_arch()}']
+    use_pb_files = os.path.exists(os.path.join(system_image_dir, pb_files[1]))
+    for f in pb_files if use_pb_files else image_files:
+        add_exec_to_file(os.path.join(system_image_dir, f))
+
+
+def _run_flash_command(system_image_dir: str, target_id: Optional[str]):
+    """Helper function for running `ffx target flash`."""
+
+    _add_exec_to_flash_binaries(system_image_dir)
+    # TODO(fxb/91843): Remove this workaround when ffx has stable support for
+    # multiple hardware devices connected via USB.
+    if running_unattended():
+        flash_cmd = [
+            os.path.join(system_image_dir, 'flash.sh'),
+            '--ssh-key=%s' % get_ssh_keys()
+        ]
+        # The target ID could be the node name or the serial number.
+        if target_id:
+            flash_cmd.extend(('-s', target_id))
+        subprocess.run(flash_cmd, check=True, timeout=240)
+        return
+
+    manifest = os.path.join(system_image_dir, 'flash-manifest.manifest')
+    common.run_ffx_command(
+        ('target', 'flash', manifest, '--no-bootloader-reboot'),
+        target_id=target_id,
+        configs=[
+            'fastboot.usb.disabled=true', 'ffx.fastboot.inline_target=true',
+            'fastboot.reboot.reconnect_timeout=120'
+        ])
+
+
+def flash(system_image_dir: str,
+          target: Optional[str],
+          serial_num: Optional[str] = None) -> None:
+    """Flash the device."""
+    # Flash only with a file lock acquired.
+    # This prevents multiple fastboot binaries from flashing concurrently,
+    # which should increase the odds of flashing success.
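+    # lockfile appends '.locked' to the given path and retries the flock
+    # until _FF_LOCK_ACQ_TIMEOUT elapses, so concurrent flash attempts on
+    # this host queue up here instead of racing for the device.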
+    with lock(_FF_LOCK, timeout=_FF_LOCK_ACQ_TIMEOUT):
+        if serial_num:
+            boot_device(target, BootMode.BOOTLOADER, serial_num)
+            for _ in range(10):
+                time.sleep(10)
+                if common.run_ffx_command(('target', 'list', serial_num),
+                                          check=False).returncode == 0:
+                    break
+            _run_flash_command(system_image_dir, serial_num)
+        else:
+            _run_flash_command(system_image_dir, target)
+
+
+def update(system_image_dir: str,
+           os_check: str,
+           target: Optional[str],
+           serial_num: Optional[str] = None,
+           should_pave: Optional[bool] = True) -> None:
+    """Conditionally updates the given target.
+
+    Args:
+        system_image_dir: string, path to the image directory.
+        os_check: string which decides how to update the device.
+        target: Node-name string indicating the device that should be updated.
+        serial_num: String of the serial number of the device that should be
+          updated.
+        should_pave: Optional bool indicating whether to pave rather than
+          flash.
+    """
+    needs_update, actual_image_dir = update_required(os_check,
+                                                     system_image_dir, target,
+                                                     serial_num)
+
+    system_image_dir = actual_image_dir
+    if needs_update:
+        check_ssh_config_file()
+        if should_pave:
+            if running_unattended():
+                assert target, ('Target ID must be specified on swarming when'
+                                ' paving.')
+                # TODO(crbug.com/1405525): We should check the device state
+                # before and after rebooting it to avoid an unnecessary
+                # reboot or an undesired state.
+                boot_device(target, BootMode.RECOVERY, serial_num)
+            try:
+                pave(system_image_dir, target)
+            except subprocess.TimeoutExpired:
+                # Fall back to flashing, just in case it might work.
+                # This could recover the device and make it usable.
+                # If it fails, the device is unpaveable anyway and should be
+                # taken out of the fleet - this will do that.
+                flash(system_image_dir, target, serial_num)
+        else:
+            flash(system_image_dir, target, serial_num)
+        # Always sleep after all updates.
+        time.sleep(180)
+
+
+def register_update_args(arg_parser: argparse.ArgumentParser,
+                         default_os_check: Optional[str] = 'check',
+                         default_pave: Optional[bool] = True) -> None:
+    """Register common arguments for device updating."""
+    serve_args = arg_parser.add_argument_group('update',
+                                               'device updating arguments')
+    serve_args.add_argument('--system-image-dir',
+                            help='Specify the directory that contains the '
+                            'Fuchsia image used to pave the device. Only '
+                            'needs to be specified if "os_check" is not '
+                            '"ignore".')
+    serve_args.add_argument('--serial-num',
+                            default=os.environ.get('FUCHSIA_FASTBOOT_SERNUM'),
+                            help='Serial number of the device. Should be '
+                            'specified for devices that do not have an image '
+                            'flashed.')
+    serve_args.add_argument('--os-check',
+                            choices=['check', 'update', 'ignore'],
+                            default=default_os_check,
+                            help='Sets the OS version enforcement policy. If '
+                            '"check", then the deployment process will halt '
+                            'if the target\'s version does not match. If '
+                            '"update", then the target device will '
+                            'be reflashed. If "ignore", then the OS version '
+                            'will not be checked.')
+    serve_args.add_argument('--pave',
+                            action='store_true',
+                            help='Performs a pave instead of a flash. '
+                            'The device must already be in Zedboot.')
+    serve_args.add_argument('--no-pave',
+                            action='store_false',
+                            dest='pave',
+                            help='Performs a flash instead of a pave '
+                            '(experimental).')
+    serve_args.set_defaults(pave=default_pave)
+
+
+def main():
+    """Stand-alone function for flashing a device."""
+    parser = argparse.ArgumentParser()
+    register_device_args(parser)
+    register_update_args(parser, default_os_check='update', default_pave=False)
+    args = parser.parse_args()
+    update(args.system_image_dir, args.os_check, args.target_id,
+           args.serial_num, args.pave)
+
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/build/fuchsia/test/flash_device_unittests.py b/build/fuchsia/test/flash_device_unittests.py
new file mode 100755
index 000000000000..0233ba9660d3
--- /dev/null
+++ b/build/fuchsia/test/flash_device_unittests.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env vpython3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""File for testing flash_device.py."""
+
+import os
+import subprocess
+import unittest
+import unittest.mock as mock
+
+import common
+import flash_device
+
+_TEST_IMAGE_DIR = 'test/image/dir'
+_TEST_PRODUCT = 'test_product'
+_TEST_VERSION = 'test.version'
+
+
+# pylint: disable=too-many-public-methods,protected-access
+class FlashDeviceTest(unittest.TestCase):
+    """Unittests for flash_device.py."""
+
+    def setUp(self) -> None:
+        context_mock = mock.Mock()
+        context_mock.__enter__ = mock.Mock(return_value=None)
+        context_mock.__exit__ = mock.Mock(return_value=None)
+        ffx_mock = mock.Mock()
+        ffx_mock.returncode = 0
+        ffx_patcher = mock.patch('common.run_ffx_command',
+                                 return_value=ffx_mock)
+        sdk_hash_patcher = mock.patch('flash_device.get_sdk_hash',
+                                      return_value=(_TEST_PRODUCT,
+                                                    _TEST_VERSION))
+        swarming_patcher = mock.patch('flash_device.running_unattended',
+                                      return_value=False)
+        check_patcher = mock.patch('flash_device.check_ssh_config_file')
+        time_sleep = mock.patch('time.sleep')
+        self._ffx_mock = ffx_patcher.start()
+        self._sdk_hash_mock = sdk_hash_patcher.start()
+        self._check_patcher_mock = check_patcher.start()
+        self._swarming_mock = swarming_patcher.start()
+        self._time_sleep = time_sleep.start()
+        # Register the patchers, not the started mocks, for cleanup so the
+        # patches are actually undone when each test finishes.
+        self.addCleanup(ffx_patcher.stop)
+        self.addCleanup(sdk_hash_patcher.stop)
+        self.addCleanup(check_patcher.stop)
+        self.addCleanup(swarming_patcher.stop)
+        self.addCleanup(time_sleep.stop)
+
+    def test_update_required_on_ignore_returns_immediately(self) -> None:
+        """Test |os_check|='ignore' skips all checks."""
+        result, new_image_dir = flash_device.update_required(
+            'ignore', 'some-image-dir', None)
+
+        self.assertFalse(result)
+        self.assertEqual(new_image_dir, 'some-image-dir')
+
+    def test_update_required_raises_value_error_if_no_image_dir(self) -> None:
+        """Test |os_check|!='ignore' checks that the image dir is truthy."""
+        with self.assertRaises(ValueError):
+            flash_device.update_required('update', None, None)
+
+    def test_update_required_logs_missing_image_dir(self) -> None:
+        """Test |os_check|!='ignore' warns if the image dir does not exist."""
+        with mock.patch('os.path.exists', return_value=False), \
+                mock.patch('flash_device.find_image_in_sdk'), \
+                mock.patch('flash_device._get_system_info'), \
+                self.assertLogs() as logger:
+            flash_device.update_required('update', 'some/image/dir', None)
+            self.assertIn('image directory does not exist', logger.output[0])
+
+    def test_update_required_searches_and_returns_sdk_if_image_found(self
+ ) -> None: + """Test |os_check|!='ignore' searches for image dir in SDK.""" + with mock.patch('os.path.exists', return_value=False), \ + mock.patch('flash_device.find_image_in_sdk') as mock_find, \ + mock.patch('flash_device._get_system_info'), \ + mock.patch('common.SDK_ROOT', 'path/to/sdk/dir'), \ + self.assertLogs(): + mock_find.return_value = 'path/to/image/dir' + update_required, new_image_dir = flash_device.update_required( + 'update', 'product-bundle', None, None) + self.assertTrue(update_required) + self.assertEqual(new_image_dir, 'path/to/image/dir') + mock_find.assert_called_once_with('product-bundle') + + def test_update_required_raises_file_not_found_error(self) -> None: + """Test |os_check|!='ignore' raises FileNotFoundError if no path.""" + with mock.patch('os.path.exists', return_value=False), \ + mock.patch('flash_device.find_image_in_sdk', + return_value=None), \ + mock.patch('common.SDK_ROOT', 'path/to/sdk/dir'), \ + self.assertLogs(), \ + self.assertRaises(FileNotFoundError): + flash_device.update_required('update', 'product-bundle', None) + + def test_update_ignore(self) -> None: + """Test setting |os_check| to 'ignore'.""" + + flash_device.update(_TEST_IMAGE_DIR, 'ignore', None) + self.assertEqual(self._ffx_mock.call_count, 0) + self.assertEqual(self._sdk_hash_mock.call_count, 0) + + def test_dir_unspecified_value_error(self) -> None: + """Test ValueError raised when system_image_dir unspecified.""" + + with self.assertRaises(ValueError): + flash_device.update(None, 'check', None) + + def test_update_system_info_match(self) -> None: + """Test no update when |os_check| is 'check' and system info matches.""" + + with mock.patch('os.path.exists', return_value=True): + self._ffx_mock.return_value.stdout = \ + '[{"title": "Build", "child": [{"value": "%s"}, ' \ + '{"value": "%s"}]}]' % (_TEST_VERSION, _TEST_PRODUCT) + flash_device.update(_TEST_IMAGE_DIR, 'check', None) + self.assertEqual(self._ffx_mock.call_count, 1) + self.assertEqual(self._sdk_hash_mock.call_count, 1) + + def test_update_system_info_catches_boot_failure(self) -> None: + """Test update when |os_check=check| catches boot_device exceptions.""" + + self._swarming_mock.return_value = True + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('flash_device._add_exec_to_flash_binaries'), \ + mock.patch('flash_device.boot_device') as mock_boot, \ + mock.patch('flash_device.get_system_info') as mock_sys_info, \ + mock.patch('flash_device.subprocess.run'): + mock_boot.side_effect = common.StateTransitionError( + 'Incorrect state') + self._ffx_mock.return_value.stdout = \ + '[{"title": "Build", "child": [{"value": "wrong.version"}, ' \ + '{"value": "wrong_product"}]}]' + flash_device.update(_TEST_IMAGE_DIR, + 'check', + None, + should_pave=False) + # Regular boot is to check the versions. + mock_boot.assert_called_once_with(mock.ANY, + common.BootMode.REGULAR, None) + self.assertEqual(self._ffx_mock.call_count, 0) + + # get_system_info should not even be called due to early exit. 
+ mock_sys_info.assert_not_called() + + def test_update_system_info_mismatch(self) -> None: + """Test update when |os_check| is 'check' and system info does not + match.""" + + self._swarming_mock.return_value = True + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('flash_device._add_exec_to_flash_binaries'), \ + mock.patch('flash_device.boot_device') as mock_boot, \ + mock.patch('flash_device.subprocess.run'): + self._ffx_mock.return_value.stdout = \ + '[{"title": "Build", "child": [{"value": "wrong.version"}, ' \ + '{"value": "wrong_product"}]}]' + flash_device.update(_TEST_IMAGE_DIR, + 'check', + None, + should_pave=False) + # Regular boot is to check the versions. + mock_boot.assert_called_once_with(mock.ANY, + common.BootMode.REGULAR, None) + self.assertEqual(self._ffx_mock.call_count, 2) + + def test_update_system_info_mismatch_adds_exec_to_flash_binaries(self + ) -> None: + """Test update adds exec bit to flash binaries if flashing.""" + + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('flash_device.get_host_arch', + return_value='foo_arch'), \ + mock.patch('flash_device.add_exec_to_file') as add_exec: + self._ffx_mock.return_value.stdout = \ + '[{"title": "Build", "child": [{"value": "wrong.version"}, ' \ + '{"value": "wrong_product"}]}]' + flash_device.update(_TEST_IMAGE_DIR, + 'check', + None, + should_pave=False) + add_exec.assert_has_calls([ + mock.call(os.path.join(_TEST_IMAGE_DIR, 'flash.sh')), + mock.call( + os.path.join(_TEST_IMAGE_DIR, 'host_foo_arch', 'fastboot')) + ], + any_order=True) + + def test_update_adds_exec_to_flash_binaries_depending_on_location( + self) -> None: + """Test update adds exec bit to flash binaries if flashing.""" + + # First exists is for image dir, second is for fastboot binary. + # Missing this fastboot binary means that the test will default to a + # different path. 
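+        # side_effect order (an illustration of the assertions below):
+        # update_required() sees the image directory as existing (True),
+        # then _add_exec_to_flash_binaries() misses host_<arch>/fastboot
+        # (False) and falls back to the fastboot.exe.linux-<arch> naming.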
+        with mock.patch('os.path.exists', side_effect=[True, False]), \
+                mock.patch('flash_device.get_host_arch',
+                           return_value='foo_arch'), \
+                mock.patch('flash_device.add_exec_to_file') as add_exec:
+            self._ffx_mock.return_value.stdout = \
+                '[{"title": "Build", "child": [{"value": "wrong.version"}, ' \
+                '{"value": "wrong_product"}]}]'
+            flash_device.update(_TEST_IMAGE_DIR,
+                                'check',
+                                None,
+                                should_pave=False)
+            add_exec.assert_has_calls([
+                mock.call(os.path.join(_TEST_IMAGE_DIR, 'flash.sh')),
+                mock.call(
+                    os.path.join(_TEST_IMAGE_DIR,
+                                 'fastboot.exe.linux-foo_arch'))
+            ],
+                                      any_order=True)
+
+    def test_incorrect_target_info(self) -> None:
+        """Test update when |os_check| is 'check' and system info was not
+        retrieved."""
+        with mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device._add_exec_to_flash_binaries'):
+            self._ffx_mock.return_value.stdout = '[{"title": "badtitle"}]'
+            flash_device.update(_TEST_IMAGE_DIR,
+                                'check',
+                                None,
+                                should_pave=False)
+        self.assertEqual(self._ffx_mock.call_count, 2)
+
+    def test_update_with_serial_num(self) -> None:
+        """Test update when |serial_num| is specified."""
+
+        with mock.patch('time.sleep'), \
+                mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device.boot_device') as mock_boot, \
+                mock.patch('flash_device._add_exec_to_flash_binaries'):
+            flash_device.update(_TEST_IMAGE_DIR,
+                                'update',
+                                None,
+                                'test_serial',
+                                should_pave=False)
+        mock_boot.assert_called_once_with(mock.ANY,
+                                          common.BootMode.BOOTLOADER,
+                                          'test_serial')
+        self.assertEqual(self._ffx_mock.call_count, 2)
+
+    def test_reboot_failure(self) -> None:
+        """Test |update_required| when the reboot command fails."""
+        self._ffx_mock.return_value.returncode = 1
+        with mock.patch('time.sleep'), \
+                mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device.running_unattended',
+                           return_value=True), \
+                mock.patch('flash_device.boot_device'):
+            required, _ = flash_device.update_required('check',
+                                                       _TEST_IMAGE_DIR, None)
+            self.assertEqual(required, True)
+
+    # pylint: disable=no-self-use
+    def test_update_calls_paving_if_specified(self) -> None:
+        """Test update calls pave if specified."""
+        with mock.patch('time.sleep'), \
+                mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device.running_unattended',
+                           return_value=True), \
+                mock.patch('flash_device.boot_device') as mock_boot, \
+                mock.patch('flash_device.pave') as mock_pave:
+            flash_device.update(_TEST_IMAGE_DIR,
+                                'update',
+                                'some-target-id',
+                                should_pave=True)
+
+            mock_boot.assert_called_once_with('some-target-id',
+                                              common.BootMode.RECOVERY, None)
+            mock_pave.assert_called_once_with(_TEST_IMAGE_DIR,
+                                              'some-target-id')
+
+    # pylint: enable=no-self-use
+
+    def test_update_raises_error_if_unattended_with_no_target(self) -> None:
+        """Test update raises an error if no target is specified."""
+
+        self._swarming_mock.return_value = True
+        with mock.patch('time.sleep'), \
+                mock.patch('flash_device.pave'), \
+                mock.patch('os.path.exists', return_value=True):
+            self.assertRaises(AssertionError,
+                              flash_device.update,
+                              _TEST_IMAGE_DIR,
+                              'update',
+                              None,
+                              should_pave=True)
+
+    def test_update_on_swarming(self) -> None:
+        """Test update on swarming bots."""
+
+        self._swarming_mock.return_value = True
+        with mock.patch('time.sleep'), \
+                mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device._add_exec_to_flash_binaries'), \
+                mock.patch('flash_device.boot_device') as mock_boot, \
+                mock.patch('subprocess.run'):
+            flash_device.update(_TEST_IMAGE_DIR,
+                                'update',
+                                None,
+                                'test_serial',
+                                should_pave=False)
+        mock_boot.assert_called_once_with(mock.ANY,
+                                          common.BootMode.BOOTLOADER,
+                                          'test_serial')
+        self.assertEqual(self._ffx_mock.call_count, 1)
+
+    # pylint: disable=no-self-use
+    def test_update_with_pave_timeout_defaults_to_flash(self) -> None:
+        """Test update falls back to flash if pave fails."""
+        with mock.patch('time.sleep'), \
+                mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device.running_unattended',
+                           return_value=True), \
+                mock.patch('flash_device.pave') as mock_pave, \
+                mock.patch('flash_device.boot_device'), \
+                mock.patch('flash_device.flash') as mock_flash:
+            mock_pave.side_effect = subprocess.TimeoutExpired(
+                cmd='/some/cmd',
+                timeout=0,
+            )
+            flash_device.update(_TEST_IMAGE_DIR,
+                                'update',
+                                'some-target-id',
+                                should_pave=True)
+            mock_pave.assert_called_once_with(_TEST_IMAGE_DIR,
+                                              'some-target-id')
+            mock_flash.assert_called_once_with(_TEST_IMAGE_DIR,
+                                               'some-target-id', None)
+
+    # pylint: enable=no-self-use
+
+    def test_main(self) -> None:
+        """Tests the |main| function."""
+
+        with mock.patch(
+                'sys.argv',
+            ['flash_device.py', '--os-check', 'ignore', '--no-pave']):
+            with mock.patch.dict(os.environ, {}):
+                flash_device.main()
+                self.assertEqual(self._ffx_mock.call_count, 0)
+# pylint: enable=too-many-public-methods,protected-access
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/build/fuchsia/test/lockfile.py b/build/fuchsia/test/lockfile.py
new file mode 100644
index 000000000000..422cfe4c2403
--- /dev/null
+++ b/build/fuchsia/test/lockfile.py
@@ -0,0 +1,79 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Exclusive file locking for all supported platforms.
+
+Copied from third_party/depot_tools/lockfile.py.
+"""
+
+import contextlib
+import fcntl
+import logging
+import os
+import time
+
+
+class LockError(Exception):
+    """Error raised if the timeout expires or locking (without a timeout)
+    fails."""
+
+
+def _open_file(lockfile):
+    open_flags = (os.O_CREAT | os.O_WRONLY)
+    return os.open(lockfile, open_flags, 0o644)
+
+
+def _close_file(file_descriptor):
+    os.close(file_descriptor)
+
+
+def _lock_file(file_descriptor):
+    fcntl.flock(file_descriptor, fcntl.LOCK_EX | fcntl.LOCK_NB)
+
+
+def _try_lock(lockfile):
+    f = _open_file(lockfile)
+    try:
+        _lock_file(f)
+    except Exception:
+        _close_file(f)
+        raise
+    return lambda: _close_file(f)
+
+
+def _lock(path, timeout=0):
+    """_lock returns a function to release the lock if locking was successful.
+
+    _lock also implements simple retry logic."""
+    elapsed = 0
+    while True:
+        try:
+            return _try_lock(path + '.locked')
+        except (OSError, IOError) as error:
+            if elapsed < timeout:
+                sleep_time = min(10, timeout - elapsed)
+                logging.info(
+                    'Could not create lockfile; will retry after sleep(%d).',
+                    sleep_time)
+                elapsed += sleep_time
+                time.sleep(sleep_time)
+                continue
+            raise LockError("Error locking %s (err: %s)" %
+                            (path, str(error))) from error
+
+
+@contextlib.contextmanager
+def lock(path, timeout=0):
+    """Get exclusive lock to path.
+ + Usage: + import lockfile + with lockfile.lock(path, timeout): + # Do something + pass + + """ + release_fn = _lock(path, timeout) + try: + yield + finally: + release_fn() diff --git a/build/fuchsia/test/log_manager.py b/build/fuchsia/test/log_manager.py new file mode 100755 index 000000000000..98b711d57969 --- /dev/null +++ b/build/fuchsia/test/log_manager.py @@ -0,0 +1,160 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Reads log data from a device.""" + +import argparse +import os +import subprocess +import sys +import time + +from contextlib import AbstractContextManager +from typing import Iterable, Optional, TextIO + +from common import catch_sigterm, read_package_paths, register_common_args, \ + register_device_args, run_continuous_ffx_command, \ + run_ffx_command +from ffx_integration import ScopedFfxConfig, run_symbolizer + + +class LogManager(AbstractContextManager): + """Handles opening and closing file streams for logging purposes.""" + + def __init__(self, logs_dir: Optional[str]) -> None: + self._logs_dir = logs_dir + + # A dictionary with the log file path as the key and a file stream as + # value. + self._log_files = {} + self._log_procs = [] + self._scoped_ffx_log = None + + if self._logs_dir: + self._scoped_ffx_log = ScopedFfxConfig('log.dir', self._logs_dir) + + def __enter__(self): + if self._scoped_ffx_log: + self._scoped_ffx_log.__enter__() + run_ffx_command(('daemon', 'stop'), check=False) + + return self + + def is_logging_enabled(self) -> bool: + """Check whether logging is turned on.""" + + return self._logs_dir is not None + + def add_log_process(self, process: subprocess.Popen) -> None: + """Register a logging process to LogManager to be killed at LogManager + teardown.""" + + self._log_procs.append(process) + + def open_log_file(self, log_file_name: str) -> TextIO: + """Open a file stream with log_file_name in the logs directory.""" + + if not self._logs_dir: + raise Exception('Logging directory is not specified.') + log_file_path = os.path.join(self._logs_dir, log_file_name) + log_file = open(log_file_path, 'w', buffering=1) + self._log_files[log_file_path] = log_file + return log_file + + def stop(self): + """Stop all active logging instances.""" + + for proc in self._log_procs: + proc.kill() + for log in self._log_files.values(): + log.close() + + def __exit__(self, exc_type, exc_value, traceback): + self.stop() + if self._scoped_ffx_log: + self._scoped_ffx_log.__exit__(exc_type, exc_value, traceback) + + # Allow command to fail while ffx team investigates the issue. + run_ffx_command(('daemon', 'stop'), check=False) + + +def start_system_log(log_manager: LogManager, + log_to_stdout: bool, + pkg_paths: Optional[Iterable[str]] = None, + log_args: Optional[Iterable[str]] = None, + target_id: Optional[str] = None) -> None: + """ + Start system logging. + + Args: + log_manager: A LogManager class that manages the log file and process. + log_to_stdout: If set to True, print logs directly to stdout. + pkg_paths: Path to the packages + log_args: Arguments forwarded to `ffx log` command. + target_id: Specify a target to use. + """ + + if not log_manager.is_logging_enabled() and not log_to_stdout: + return + symbol_paths = None + if pkg_paths: + symbol_paths = [] + + # Locate debug symbols for each package. 
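+        # Assumed layout: the build writes an ids.txt symbol index (one
+        # 'build-id unstripped-binary-path' entry per line) into the same
+        # directory as each package archive.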
+ for pkg_path in pkg_paths: + assert os.path.isfile(pkg_path), '%s does not exist' % pkg_path + symbol_paths.append( + os.path.join(os.path.dirname(pkg_path), 'ids.txt')) + + if log_to_stdout: + system_log = sys.stdout + else: + system_log = log_manager.open_log_file('system_log') + log_cmd = ['log', '--raw'] + if log_args: + log_cmd.extend(log_args) + if symbol_paths: + log_proc = run_continuous_ffx_command(log_cmd, + target_id, + stdout=subprocess.PIPE) + log_manager.add_log_process(log_proc) + log_manager.add_log_process( + run_symbolizer(symbol_paths, log_proc.stdout, system_log)) + else: + log_manager.add_log_process( + run_continuous_ffx_command(log_cmd, target_id, stdout=system_log)) + + +def main(): + """Stand-alone function for fetching system logs and print to terminal. + Runs until the process is killed or interrupted (i.e. user presses CTRL-C). + """ + + catch_sigterm() + parser = argparse.ArgumentParser() + register_common_args(parser) + register_device_args(parser) + parser.add_argument('--packages', + action='append', + help='Name of the packages to symbolize.') + manager_args, system_log_args = parser.parse_known_args() + if manager_args.packages and not manager_args.out_dir: + raise ValueError('--out-dir must be specified to symbolize packages.') + package_paths = [] + if manager_args.packages: + for package in manager_args.packages: + package_paths.extend( + read_package_paths(manager_args.out_dir, package)) + with LogManager(None) as log_manager: + try: + start_system_log(log_manager, True, package_paths, system_log_args, + manager_args.target_id) + while True: + time.sleep(10000) + except (KeyboardInterrupt, SystemExit): + pass + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/fuchsia/test/log_manager_unittests.py b/build/fuchsia/test/log_manager_unittests.py new file mode 100755 index 000000000000..66830a836a14 --- /dev/null +++ b/build/fuchsia/test/log_manager_unittests.py @@ -0,0 +1,115 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""File for testing log_manager.py.""" + +import sys +import unittest +import unittest.mock as mock + +import log_manager + +_LOGS_DIR = 'test_logs_dir' + + +class LogManagerTest(unittest.TestCase): + """Unittests for log_manager.py.""" + + @mock.patch('log_manager.run_continuous_ffx_command') + def test_no_logs(self, mock_ffx) -> None: + """Test |start_system_log| does nothing when logging is off.""" + + log = log_manager.LogManager(None) + log_manager.start_system_log(log, False) + self.assertEqual(mock_ffx.call_count, 0) + + @mock.patch('log_manager.run_continuous_ffx_command') + def test_log_to_stdout(self, mock_ffx) -> None: + """Test |start_system_log| logs to stdout when log manager is off.""" + + log = log_manager.LogManager(None) + log_manager.start_system_log(log, True) + self.assertEqual(mock_ffx.call_args_list[0][1]['stdout'], sys.stdout) + self.assertEqual(mock_ffx.call_count, 1) + + @mock.patch('log_manager.run_continuous_ffx_command') + @mock.patch('builtins.open') + def test_log_to_file(self, mock_open, mock_ffx) -> None: + """Test |start_system_log| logs to log file when log manager is on.""" + + log = log_manager.LogManager(_LOGS_DIR) + log_manager.start_system_log(log, False) + self.assertEqual(mock_ffx.call_args_list[0][1]['stdout'], + mock_open.return_value) + self.assertEqual(mock_ffx.call_count, 1) + + @mock.patch('log_manager.run_continuous_ffx_command') + def test_log_with_log_args(self, mock_ffx) -> None: + """Test log args are used when passed in to |start_system_log|.""" + + log = log_manager.LogManager(None) + log_manager.start_system_log(log, True, log_args=['test_log_args']) + self.assertEqual(mock_ffx.call_args_list[0][0][0], + ['log', '--raw', 'test_log_args']) + self.assertEqual(mock_ffx.call_count, 1) + + @mock.patch('log_manager.run_continuous_ffx_command') + def test_log_with_symbols(self, mock_ffx) -> None: + """Test symbols are used when pkg_paths are set.""" + + log = log_manager.LogManager(_LOGS_DIR) + with mock.patch('os.path.isfile', return_value=True), \ + mock.patch('builtins.open'), \ + mock.patch('log_manager.run_symbolizer'): + log_manager.start_system_log(log, False, pkg_paths=['test_pkg']) + log.stop() + self.assertEqual(mock_ffx.call_count, 1) + self.assertEqual(mock_ffx.call_args_list[0][0][0], ['log', '--raw']) + + def test_no_logging_dir_exception(self) -> None: + """Tests empty LogManager throws an exception on |open_log_file|.""" + + log = log_manager.LogManager(None) + with self.assertRaises(Exception): + log.open_log_file('test_log_file') + + @mock.patch('log_manager.ScopedFfxConfig') + @mock.patch('log_manager.run_ffx_command') + def test_log_manager(self, mock_ffx, mock_scoped_config) -> None: + """Tests LogManager as a context manager.""" + + context_mock = mock.Mock() + mock_scoped_config.return_value = context_mock + context_mock.__enter__ = mock.Mock(return_value=None) + context_mock.__exit__ = mock.Mock(return_value=None) + with log_manager.LogManager(_LOGS_DIR): + pass + self.assertEqual(mock_ffx.call_count, 2) + + def test_main_exception(self) -> None: + """Tests |main| function to throw exception on incompatible flags.""" + + with mock.patch('sys.argv', + ['log_manager.py', '--packages', 'test_package']): + with self.assertRaises(ValueError): + log_manager.main() + + @mock.patch('log_manager.read_package_paths') + @mock.patch('log_manager.start_system_log') + def test_main(self, mock_system_log, mock_read_paths) -> None: + """Tests |main| function.""" + + with mock.patch('sys.argv', [ + 'log_manager.py', '--packages', 
'test_package', '--out-dir', + 'test_out_dir' + ]): + with mock.patch('log_manager.time.sleep', + side_effect=KeyboardInterrupt): + log_manager.main() + self.assertEqual(mock_system_log.call_count, 1) + self.assertEqual(mock_read_paths.call_count, 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/fuchsia/test/publish_package.py b/build/fuchsia/test/publish_package.py new file mode 100755 index 000000000000..5c566544af93 --- /dev/null +++ b/build/fuchsia/test/publish_package.py @@ -0,0 +1,68 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Implements commands for managing Fuchsia repos via the pm tool.""" + +import argparse +import os +import subprocess +import sys + +from typing import Iterable + +from common import SDK_TOOLS_DIR, read_package_paths, register_common_args + +_pm_tool = os.path.join(SDK_TOOLS_DIR, 'pm') + + +def publish_packages(packages: Iterable[str], + repo: str, + new_repo: bool = False) -> None: + """Publish packages to a repo directory, initializing it if necessary.""" + if new_repo: + subprocess.run([_pm_tool, 'newrepo', '-repo', repo], check=True) + for package in packages: + subprocess.run([_pm_tool, 'publish', '-a', '-r', repo, '-f', package], + check=True) + + +def register_package_args(parser: argparse.ArgumentParser, + allow_temp_repo: bool = False) -> None: + """Register common arguments for package publishing.""" + package_args = parser.add_argument_group( + 'package', 'Arguments for package publishing.') + package_args.add_argument('--packages', + action='append', + help='Paths of the package archives to install') + package_args.add_argument('--repo', + help='Directory packages will be published to.') + if allow_temp_repo: + package_args.add_argument( + '--no-repo-init', + action='store_true', + default=False, + help='Do not initialize the package repository.') + + +def main(): + """Stand-alone function for publishing packages.""" + parser = argparse.ArgumentParser() + register_package_args(parser) + register_common_args(parser) + args = parser.parse_args() + if not args.repo: + raise ValueError('Must specify directory to publish packages.') + if not args.packages: + raise ValueError('Must specify packages to publish.') + if args.out_dir: + package_paths = [] + for package in args.packages: + package_paths.extend(read_package_paths(args.out_dir, package)) + else: + package_paths = args.packages + publish_packages(package_paths, args.repo) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/fuchsia/test/publish_package_unittests.py b/build/fuchsia/test/publish_package_unittests.py new file mode 100755 index 000000000000..2bb22da963c9 --- /dev/null +++ b/build/fuchsia/test/publish_package_unittests.py @@ -0,0 +1,103 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""File for testing publish_package.py.""" + +import argparse +import unittest +import unittest.mock as mock + +from io import StringIO + +import publish_package + +_PACKAGES = ['test_package'] +_REPO = 'test_repo' + + +class PublishPackageTest(unittest.TestCase): + """Unittests for publish_package.py.""" + + def setUp(self) -> None: + self._subprocess_patcher = mock.patch('publish_package.subprocess.run') + self._subprocess_mock = self._subprocess_patcher.start() + self.addCleanup(self._subprocess_mock.stop) + + def test_new_repo(self) -> None: + """Test setting |new_repo| to True in |publish_packages|.""" + + publish_package.publish_packages(_PACKAGES, _REPO, True) + self.assertEqual(self._subprocess_mock.call_count, 2) + first_call = self._subprocess_mock.call_args_list[0] + self.assertEqual(['newrepo', '-repo', _REPO], first_call[0][0][1:]) + second_call = self._subprocess_mock.call_args_list[1] + self.assertEqual(['publish', '-a', '-r', _REPO, '-f', _PACKAGES[0]], + second_call[0][0][1:]) + + def test_no_new_repo(self) -> None: + """Test setting |new_repo| to False in |publish_packages|.""" + + publish_package.publish_packages(['test_package'], 'test_repo', False) + self.assertEqual(self._subprocess_mock.call_count, 1) + + + def test_allow_temp_repo(self) -> None: + """Test setting |allow_temp_repo| to True in |register_package_args|.""" + + parser = argparse.ArgumentParser() + publish_package.register_package_args(parser, True) + args = parser.parse_args(['--no-repo-init']) + self.assertEqual(args.no_repo_init, True) + + @mock.patch('sys.stderr', new_callable=StringIO) + def test_not_allow_temp_repo(self, mock_stderr) -> None: + """Test setting |allow_temp_repo| to False in + |register_package_args|.""" + + parser = argparse.ArgumentParser() + publish_package.register_package_args(parser) + with self.assertRaises(SystemExit): + parser.parse_args(['--no-repo-init']) + self.assertRegex(mock_stderr.getvalue(), 'unrecognized arguments') + + def test_main_no_repo_flag(self) -> None: + """Tests that not specifying packages raise a ValueError.""" + + with mock.patch('sys.argv', ['publish_package.py', '--repo', _REPO]): + with self.assertRaises(ValueError): + publish_package.main() + + def test_main_no_packages_flag(self) -> None: + """Tests that not specifying directory raise a ValueError.""" + + with mock.patch('sys.argv', + ['publish_package.py', '--packages', _PACKAGES[0]]): + with self.assertRaises(ValueError): + publish_package.main() + + def test_main_no_out_dir_flag(self) -> None: + """Tests |main| with `out_dir` omitted.""" + + with mock.patch('sys.argv', [ + 'publish_package.py', '--packages', _PACKAGES[0], '--repo', + _REPO + ]): + publish_package.main() + self.assertEqual(self._subprocess_mock.call_count, 1) + + @mock.patch('publish_package.read_package_paths') + def test_main(self, read_mock) -> None: + """Tests |main|.""" + + read_mock.return_value = ['out/test/package/path'] + with mock.patch('sys.argv', [ + 'publish_package.py', '--packages', _PACKAGES[0], '--repo', + _REPO, '--out-dir', 'out/test' + ]): + publish_package.main() + self.assertEqual(self._subprocess_mock.call_count, 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/fuchsia/test/pylintrc b/build/fuchsia/test/pylintrc new file mode 100644 index 000000000000..a144b8066660 --- /dev/null +++ b/build/fuchsia/test/pylintrc @@ -0,0 +1,26 @@ +[MESSAGES CONTROL] + +# Disable the message, report, category or checker with the given id(s). 
+disable=fixme, + +# fixme +# This complains about TODOs, which are perfectly valid to have. + +# Suppression for invalid-name error for PRESUBMIT.py file. +good-names=i,j,k,f,PRESUBMIT + +[REPORTS] + +reports=no + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=6 + +# Maximum number of instance attributes +max-attributes=10 + +[FORMAT] + +max-line-length=80 diff --git a/build/fuchsia/test/run_blink_test.py b/build/fuchsia/test/run_blink_test.py new file mode 100644 index 000000000000..ba71aa69ea38 --- /dev/null +++ b/build/fuchsia/test/run_blink_test.py @@ -0,0 +1,36 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Implements commands for running blink web tests.""" + +import os +import subprocess + +from argparse import Namespace +from typing import Optional + +from common import DIR_SRC_ROOT +from test_runner import TestRunner + +_BLINK_TEST_SCRIPT = os.path.join(DIR_SRC_ROOT, 'third_party', 'blink', + 'tools', 'run_web_tests.py') + + +class BlinkTestRunner(TestRunner): + """Test runner for running blink web tests.""" + + def __init__(self, out_dir: str, test_args: Namespace, + target_id: Optional[str]) -> None: + super().__init__(out_dir, test_args, ['content_shell'], target_id) + + # TODO(crbug.com/1278939): Remove when blink tests use CFv2 content_shell. + @staticmethod + def is_cfv2() -> bool: + return False + + def run_test(self): + test_cmd = [_BLINK_TEST_SCRIPT, '-t', os.path.basename(self._out_dir)] + + if self._test_args: + test_cmd.extend(self._test_args) + return subprocess.run(test_cmd, check=True) diff --git a/build/fuchsia/test/run_executable_test.py b/build/fuchsia/test/run_executable_test.py new file mode 100755 index 000000000000..7c6772be7e27 --- /dev/null +++ b/build/fuchsia/test/run_executable_test.py @@ -0,0 +1,263 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Implements commands for standalone CFv2 test executables.""" + +import argparse +import logging +import os +import shutil +import subprocess +import sys + +from typing import List, Optional + +from common import get_component_uri, get_host_arch, \ + register_common_args, register_device_args, \ + register_log_args +from compatible_utils import map_filter_file_to_package_file +from ffx_integration import FfxTestRunner, run_symbolizer +from test_runner import TestRunner +from test_server import setup_test_server + +DEFAULT_TEST_SERVER_CONCURRENCY = 4 + + +def _copy_custom_output_file(test_runner: FfxTestRunner, file: str, + dest: str) -> None: + """Copy custom test output file from the device to the host.""" + + artifact_dir = test_runner.get_custom_artifact_directory() + if not artifact_dir: + logging.error( + 'Failed to parse custom artifact directory from test summary ' + 'output files. Not copying %s from the device', file) + return + shutil.copy(os.path.join(artifact_dir, file), dest) + + +def _copy_coverage_files(test_runner: FfxTestRunner, dest: str) -> None: + """Copy debug data file from the device to the host if it exists.""" + + coverage_dir = test_runner.get_debug_data_directory() + if not coverage_dir: + logging.info( + 'Failed to parse coverage data directory from test summary ' + 'output files. 
Not copying coverage files from the device.') + return + shutil.copytree(coverage_dir, dest, dirs_exist_ok=True) + + +def _get_vulkan_args(use_vulkan: Optional[str]) -> List[str]: + """Helper function to set vulkan related flag.""" + + vulkan_args = [] + if not use_vulkan: + if get_host_arch() == 'x64': + # TODO(crbug.com/1261646) Remove once Vulkan is enabled by + # default. + use_vulkan = 'native' + else: + # Use swiftshader on arm64 by default because most arm64 bots + # currently don't support Vulkan emulation. + use_vulkan = 'swiftshader' + vulkan_args.append('--ozone-platform=headless') + vulkan_args.append(f'--use-vulkan={use_vulkan}') + return vulkan_args + + +class ExecutableTestRunner(TestRunner): + """Test runner for running standalone test executables.""" + + def __init__( # pylint: disable=too-many-arguments + self, + out_dir: str, + test_args: List[str], + test_name: str, + target_id: Optional[str], + code_coverage_dir: str, + logs_dir: Optional[str] = None) -> None: + super().__init__(out_dir, test_args, [test_name], target_id) + if not self._test_args: + self._test_args = [] + self._test_name = test_name + self._code_coverage_dir = os.path.basename(code_coverage_dir) + self._custom_artifact_directory = None + self._isolated_script_test_output = None + self._isolated_script_test_perf_output = None + self._logs_dir = logs_dir + self._test_launcher_summary_output = None + self._test_server = None + + def _get_args(self) -> List[str]: + parser = argparse.ArgumentParser() + parser.add_argument( + '--isolated-script-test-output', + help='If present, store test results on this path.') + parser.add_argument('--isolated-script-test-perf-output', + help='If present, store chartjson results on this ' + 'path.') + parser.add_argument( + '--test-launcher-shard-index', + type=int, + default=os.environ.get('GTEST_SHARD_INDEX'), + help='Index of this instance amongst swarming shards.') + parser.add_argument( + '--test-launcher-summary-output', + help='Where the test launcher will output its json.') + parser.add_argument( + '--test-launcher-total-shards', + type=int, + default=os.environ.get('GTEST_TOTAL_SHARDS'), + help='Total number of swarming shards of this suite.') + parser.add_argument( + '--test-launcher-filter-file', + help='Filter file(s) passed to target test process. Use ";" to ' + 'separate multiple filter files.') + parser.add_argument('--test-launcher-jobs', + type=int, + help='Sets the number of parallel test jobs.') + parser.add_argument('--enable-test-server', + action='store_true', + default=False, + help='Enable Chrome test server spawner.') + parser.add_argument('--test-arg', + dest='test_args', + action='append', + help='Legacy flag to pass in arguments for ' + 'the test process. 
These arguments can now be ' + 'passed in without a preceding "--" flag.') + parser.add_argument('--use-vulkan', + help='\'native\', \'swiftshader\' or \'none\'.') + args, child_args = parser.parse_known_args(self._test_args) + if args.isolated_script_test_output: + self._isolated_script_test_output = args.isolated_script_test_output + child_args.append( + '--isolated-script-test-output=/custom_artifacts/%s' % + os.path.basename(self._isolated_script_test_output)) + if args.isolated_script_test_perf_output: + self._isolated_script_test_perf_output = \ + args.isolated_script_test_perf_output + child_args.append( + '--isolated-script-test-perf-output=/custom_artifacts/%s' % + os.path.basename(self._isolated_script_test_perf_output)) + if args.test_launcher_shard_index is not None: + child_args.append('--test-launcher-shard-index=%d' % + args.test_launcher_shard_index) + if args.test_launcher_total_shards is not None: + child_args.append('--test-launcher-total-shards=%d' % + args.test_launcher_total_shards) + if args.test_launcher_summary_output: + self._test_launcher_summary_output = \ + args.test_launcher_summary_output + child_args.append( + '--test-launcher-summary-output=/custom_artifacts/%s' % + os.path.basename(self._test_launcher_summary_output)) + if args.test_launcher_filter_file: + test_launcher_filter_files = map( + map_filter_file_to_package_file, + args.test_launcher_filter_file.split(';')) + child_args.append('--test-launcher-filter-file=' + + ';'.join(test_launcher_filter_files)) + if args.test_launcher_jobs is not None: + test_concurrency = args.test_launcher_jobs + else: + test_concurrency = DEFAULT_TEST_SERVER_CONCURRENCY + if args.enable_test_server: + self._test_server, spawner_url_base = setup_test_server( + self._target_id, test_concurrency) + child_args.append('--remote-test-server-spawner-url-base=%s' % + spawner_url_base) + child_args.extend(_get_vulkan_args(args.use_vulkan)) + if args.test_args: + child_args.extend(args.test_args) + return child_args + + def _postprocess(self, test_runner: FfxTestRunner) -> None: + if self._test_server: + self._test_server.Stop() + if self._test_launcher_summary_output: + _copy_custom_output_file( + test_runner, + os.path.basename(self._test_launcher_summary_output), + self._test_launcher_summary_output) + if self._isolated_script_test_output: + _copy_custom_output_file( + test_runner, + os.path.basename(self._isolated_script_test_output), + self._isolated_script_test_output) + if self._isolated_script_test_perf_output: + _copy_custom_output_file( + test_runner, + os.path.basename(self._isolated_script_test_perf_output), + self._isolated_script_test_perf_output) + _copy_coverage_files(test_runner, self._code_coverage_dir) + + def run_test(self) -> subprocess.Popen: + test_args = self._get_args() + with FfxTestRunner(self._logs_dir) as test_runner: + test_proc = test_runner.run_test( + get_component_uri(self._test_name), test_args, self._target_id) + + symbol_paths = [] + for pkg_path in self._package_deps.values(): + symbol_paths.append( + os.path.join(os.path.dirname(pkg_path), 'ids.txt')) + # Symbolize output from test process and print to terminal. + symbolizer_proc = run_symbolizer(symbol_paths, test_proc.stdout, + sys.stdout) + symbolizer_proc.communicate() + + if test_proc.wait() == 0: + logging.info('Process exited normally with status code 0.') + else: + # The test runner returns an error status code if *any* + # tests fail, so we should proceed anyway. 
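+                # The nonzero code is still visible to callers through the
+                # returned process object; main() below exits with
+                # test_proc.returncode.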
+ logging.warning('Process exited with status code %d.', + test_proc.returncode) + self._postprocess(test_runner) + return test_proc + + +def create_executable_test_runner(runner_args: argparse.Namespace, + test_args: List[str]): + """Helper for creating an ExecutableTestRunner.""" + + return ExecutableTestRunner(runner_args.out_dir, test_args, + runner_args.test_type, runner_args.target_id, + runner_args.code_coverage_dir, + runner_args.logs_dir) + + +def register_executable_test_args(parser: argparse.ArgumentParser) -> None: + """Register common arguments for ExecutableTestRunner.""" + + test_args = parser.add_argument_group('test', 'arguments for test running') + test_args.add_argument('--code-coverage-dir', + default=os.getcwd(), + help='Directory to place code coverage ' + 'information. Only relevant when the target was ' + 'built with |fuchsia_code_coverage| set to true. ' + 'Defaults to current directory.') + test_args.add_argument('--test-name', + dest='test_type', + help='Name of the test package (e.g. ' + 'unit_tests).') + + +def main(): + """Stand-alone function for running executable tests.""" + + parser = argparse.ArgumentParser() + register_common_args(parser) + register_device_args(parser) + register_log_args(parser) + register_executable_test_args(parser) + runner_args, test_args = parser.parse_known_args() + runner = create_executable_test_runner(runner_args, test_args) + return runner.run_test().returncode + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/fuchsia/test/run_pytype.py b/build/fuchsia/test/run_pytype.py new file mode 100755 index 000000000000..8e603313ca15 --- /dev/null +++ b/build/fuchsia/test/run_pytype.py @@ -0,0 +1,42 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Simple helper script to run pytype on //build/fuchsia/test code.""" + +import os +import sys + +from coveragetest import COVERED_FILES + +FUCHSIA_TEST_DIR = os.path.abspath(os.path.dirname(__file__)) +DIR_SRC_DIR = os.path.realpath(os.path.join(FUCHSIA_TEST_DIR, '..', '..', + '..')) + +sys.path.append(os.path.join(FUCHSIA_TEST_DIR, '..', '..', '..', 'testing')) + +from pytype_common import pytype_runner # pylint: disable=wrong-import-position + +EXTRA_PATHS_COMPONENTS = [ + ('build', 'util', 'lib', 'common'), +] +EXTRA_PATHS = [os.path.join(DIR_SRC_DIR, *p) for p in EXTRA_PATHS_COMPONENTS] +EXTRA_PATHS.append(FUCHSIA_TEST_DIR) + +FILES_AND_DIRECTORIES_TO_CHECK = [ + os.path.join(FUCHSIA_TEST_DIR, f) for f in COVERED_FILES +] +TEST_NAME = 'fuchsia_pytype' +TEST_LOCATION = "//build/fuchsia/test/run_pytype.py" + + +def main() -> int: + """Run pytype check.""" + + return pytype_runner.run_pytype(TEST_NAME, TEST_LOCATION, + FILES_AND_DIRECTORIES_TO_CHECK, + EXTRA_PATHS, FUCHSIA_TEST_DIR) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/fuchsia/test/run_telemetry_test.py b/build/fuchsia/test/run_telemetry_test.py new file mode 100644 index 000000000000..7556b815cb28 --- /dev/null +++ b/build/fuchsia/test/run_telemetry_test.py @@ -0,0 +1,61 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Implements commands for running GPU tests.""" + +import argparse +import os +import subprocess + +from typing import List, Optional + +from common import DIR_SRC_ROOT +from test_runner import TestRunner + +_GPU_TEST_SCRIPT = os.path.join(DIR_SRC_ROOT, 'content', 'test', 'gpu', + 'run_gpu_integration_test.py') +_PERF_TEST_SCRIPT = os.path.join(DIR_SRC_ROOT, 'tools', 'perf', + 'run_benchmark') + + +class TelemetryTestRunner(TestRunner): + """Test runner for running GPU tests.""" + + def __init__(self, test_type: str, out_dir: str, test_args: List[str], + target_id: Optional[str]) -> None: + parser = argparse.ArgumentParser() + parser.add_argument( + '--browser', help='The browser to use for Telemetry based tests.') + args, _ = parser.parse_known_args(test_args) + + if args.browser == 'web-engine-shell': + packages = ['web_engine_shell'] + elif args.browser == 'fuchsia-chrome': + packages = ['chrome'] + elif args.browser == 'cast-streaming-shell': + packages = ['cast_streaming_shell'] + else: + raise Exception('Unknown browser %s' % args.browser) + + if test_type == 'gpu': + self._test_script = _GPU_TEST_SCRIPT + elif test_type == 'perf': + self._test_script = _PERF_TEST_SCRIPT + else: + raise ValueError('Test type can only be |gpu| or |perf|.') + + super().__init__(out_dir, test_args, packages, target_id) + + # TODO(crbug.com/1345390): Remove when Telemetry tests use CFv2 components. + @staticmethod + def is_cfv2() -> bool: + return False + + def run_test(self): + test_cmd = [self._test_script] + if self._test_args: + test_cmd.extend(self._test_args) + test_cmd.extend(['--chromium-output-directory', self._out_dir]) + if self._target_id: + test_cmd.extend(['--fuchsia-target-id', self._target_id]) + return subprocess.run(test_cmd, check=True) diff --git a/build/fuchsia/test/run_test.py b/build/fuchsia/test/run_test.py new file mode 100755 index 000000000000..3fc3ac91d253 --- /dev/null +++ b/build/fuchsia/test/run_test.py @@ -0,0 +1,127 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Implements commands for running tests E2E on a Fuchsia device.""" + +import argparse +import sys +import tempfile + +from contextlib import ExitStack +from typing import List + +from common import register_common_args, register_device_args, \ + register_log_args, resolve_packages, run_ffx_command, \ + set_ffx_isolate_dir +from compatible_utils import running_unattended +from ffx_integration import ScopedFfxConfig, test_connection +from flash_device import register_update_args, update +from log_manager import LogManager, start_system_log +from publish_package import publish_packages, register_package_args +from run_blink_test import BlinkTestRunner +from run_executable_test import create_executable_test_runner, \ + register_executable_test_args +from run_telemetry_test import TelemetryTestRunner +from run_webpage_test import WebpageTestRunner +from serve_repo import register_serve_args, serve_repository +from start_emulator import create_emulator_from_args, register_emulator_args +from test_runner import TestRunner +from ermine_ctl import ErmineCtl + + +def _get_test_runner(runner_args: argparse.Namespace, + test_args: List[str]) -> TestRunner: + """Initialize a suitable TestRunner class.""" + + if runner_args.test_type == 'blink': + return BlinkTestRunner(runner_args.out_dir, test_args, + runner_args.target_id) + if runner_args.test_type in ['gpu', 'perf']: + return TelemetryTestRunner(runner_args.test_type, runner_args.out_dir, + test_args, runner_args.target_id) + if runner_args.test_type in ['webpage']: + return WebpageTestRunner(runner_args.out_dir, test_args, + runner_args.target_id) + return create_executable_test_runner(runner_args, test_args) + + +def main(): + """E2E method for installing packages and running a test.""" + parser = argparse.ArgumentParser() + parser.add_argument( + 'test_type', + help='The type of test to run. Options include \'blink\', \'gpu\', ' + 'or in the case of executable tests, the test name.') + parser.add_argument('--device', + '-d', + action='store_true', + default=False, + help='Use an existing device.') + + # Register arguments + register_common_args(parser) + register_device_args(parser) + register_emulator_args(parser) + register_executable_test_args(parser) + register_update_args(parser, default_os_check='ignore', default_pave=False) + register_log_args(parser) + register_package_args(parser, allow_temp_repo=True) + register_serve_args(parser) + + # Treat unrecognized arguments as test specific arguments. + runner_args, test_args = parser.parse_known_args() + + if not runner_args.out_dir: + raise ValueError('--out-dir must be specified.') + + if runner_args.target_id: + runner_args.device = True + + with ExitStack() as stack: + if running_unattended(): + set_ffx_isolate_dir( + stack.enter_context(tempfile.TemporaryDirectory())) + run_ffx_command(('daemon', 'stop'), check=False) + if running_unattended(): + stack.enter_context( + ScopedFfxConfig('repository.server.listen', '"[::]:0"')) + log_manager = stack.enter_context(LogManager(runner_args.logs_dir)) + if runner_args.device: + update(runner_args.system_image_dir, runner_args.os_check, + runner_args.target_id, runner_args.serial_num, + runner_args.pave) + else: + runner_args.target_id = stack.enter_context( + create_emulator_from_args(runner_args)) + + test_connection(runner_args.target_id) + + test_runner = _get_test_runner(runner_args, test_args) + package_deps = test_runner.package_deps + + if not runner_args.repo: + # Create a directory that serves as a temporary repository. 
+ runner_args.repo = stack.enter_context( + tempfile.TemporaryDirectory()) + + publish_packages(package_deps.values(), runner_args.repo, + not runner_args.no_repo_init) + + stack.enter_context(serve_repository(runner_args)) + + # Start system logging, after all possible restarts of the ffx daemon + # so that logging will not be interrupted. + start_system_log(log_manager, False, package_deps.values(), + ('--since', 'now'), runner_args.target_id) + + ermine = ErmineCtl(runner_args.target_id) + if ermine.exists: + ermine.take_to_shell() + + resolve_packages(package_deps.keys(), runner_args.target_id) + return test_runner.run_test().returncode + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/fuchsia/test/run_webpage_test.py b/build/fuchsia/test/run_webpage_test.py new file mode 100644 index 000000000000..31fa0a32ca48 --- /dev/null +++ b/build/fuchsia/test/run_webpage_test.py @@ -0,0 +1,60 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Implements commands for running webpage tests.""" + +import argparse +import logging +import time + +from typing import List, Optional + +from common import catch_sigterm, run_continuous_ffx_command +from test_runner import TestRunner + + +class WebpageTestRunner(TestRunner): + """Test runner for running GPU tests.""" + + def __init__(self, out_dir: str, test_args: List[str], + target_id: Optional[str]) -> None: + parser = argparse.ArgumentParser() + parser.add_argument( + '--browser', + choices=['web-engine-shell', 'chrome'], + help='The browser to use for Telemetry based tests.') + args, _ = parser.parse_known_args(test_args) + + if args.browser == 'web-engine-shell': + packages = ['web_engine_shell'] + else: + packages = ['chrome'] + + super().__init__(out_dir, test_args, packages, target_id) + + def run_test(self): + catch_sigterm() + browser_cmd = [ + 'test', + 'run', + '-t', + '3600', # Keep the webpage running for an hour. + f'fuchsia-pkg://fuchsia.com/{self._packages[0]}#meta/' + f'{self._packages[0]}.cm' + ] + browser_cmd.extend( + ['--', '--web-engine-package-name=web_engine_with_webui']) + if self._test_args: + browser_cmd.extend(self._test_args) + logging.info('Starting %s', self._packages[0]) + try: + browser_proc = run_continuous_ffx_command(browser_cmd) + while True: + time.sleep(10000) + except KeyboardInterrupt: + logging.info('Ctrl-C received; shutting down the webpage.') + browser_proc.kill() + except SystemExit: + logging.info('SIGTERM received; shutting down the webpage.') + browser_proc.kill() + return browser_proc diff --git a/build/fuchsia/test/serve_repo.py b/build/fuchsia/test/serve_repo.py new file mode 100755 index 000000000000..7270bb9ba778 --- /dev/null +++ b/build/fuchsia/test/serve_repo.py @@ -0,0 +1,98 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Implements commands for serving a TUF repository.""" + +import argparse +import contextlib +import sys + +from typing import Iterator, Optional + +from common import REPO_ALIAS, register_device_args, run_ffx_command + +_REPO_NAME = 'chromium-test-package-server' + + +def _stop_serving(repo_name: str, target: Optional[str]) -> None: + """Stop serving a repository.""" + + # Attempt to clean up. 
+ run_ffx_command(['target', 'repository', 'deregister', '-r', repo_name], + target, + check=False) + run_ffx_command(['repository', 'remove', repo_name], check=False) + run_ffx_command(['repository', 'server', 'stop'], check=False) + + +def _start_serving(repo_dir: str, repo_name: str, + target: Optional[str]) -> None: + """Start serving a repository to a target device. + + Args: + repo_dir: directory the repository is served from. + repo_name: repository name. + target: Fuchsia device the repository is served to. + """ + + run_ffx_command(('config', 'set', 'repository.server.mode', '\"ffx\"')) + + run_ffx_command(['repository', 'server', 'start']) + run_ffx_command(['repository', 'add-from-pm', repo_dir, '-r', repo_name]) + run_ffx_command([ + 'target', 'repository', 'register', '-r', repo_name, '--alias', + REPO_ALIAS + ], target) + + +def register_serve_args(arg_parser: argparse.ArgumentParser) -> None: + """Register common arguments for repository serving.""" + + serve_args = arg_parser.add_argument_group('serve', + 'repo serving arguments') + serve_args.add_argument('--serve-repo', + dest='repo', + help='Directory the repository is served from.') + serve_args.add_argument('--repo-name', + default=_REPO_NAME, + help='Name of the repository.') + + +def run_serve_cmd(cmd: str, args: argparse.Namespace) -> None: + """Helper for running serve commands.""" + + if cmd == 'start': + _start_serving(args.repo, args.repo_name, args.target_id) + else: + _stop_serving(args.repo_name, args.target_id) + + +@contextlib.contextmanager +def serve_repository(args: argparse.Namespace) -> Iterator[None]: + """Context manager for serving a repository.""" + run_serve_cmd('start', args) + try: + yield None + finally: + run_serve_cmd('stop', args) + + +def main(): + """Stand-alone function for serving a repository.""" + + parser = argparse.ArgumentParser() + parser.add_argument('cmd', + choices=['start', 'stop'], + help='Choose to start|stop repository serving.') + register_device_args(parser) + register_serve_args(parser) + args = parser.parse_args() + if args.cmd == 'start' and not args.repo: + raise ValueError('Directory the repository is serving from needs ' + 'to be specified.') + run_serve_cmd(args.cmd, args) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/fuchsia/test/serve_repo_unittests.py b/build/fuchsia/test/serve_repo_unittests.py new file mode 100755 index 000000000000..de3fa62cca61 --- /dev/null +++ b/build/fuchsia/test/serve_repo_unittests.py @@ -0,0 +1,89 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""File for testing serve_repo.py.""" + +import argparse +import unittest +import unittest.mock as mock + +import serve_repo + +from common import REPO_ALIAS + +_REPO_DIR = 'test_repo_dir' +_REPO_NAME = 'test_repo_name' +_TARGET = 'test_target' + + +class ServeRepoTest(unittest.TestCase): + """Unittests for serve_repo.py.""" + + def setUp(self) -> None: + self._namespace = argparse.Namespace(repo=_REPO_DIR, + repo_name=_REPO_NAME, + target_id=_TARGET) + + @mock.patch('serve_repo.run_ffx_command') + def test_run_serve_cmd_start(self, mock_ffx) -> None: + """Test |run_serve_cmd| function for start.""" + + serve_repo.run_serve_cmd('start', self._namespace) + self.assertEqual(mock_ffx.call_count, 4) + second_call = mock_ffx.call_args_list[1] + self.assertEqual(['repository', 'server', 'start'], second_call[0][0]) + third_call = mock_ffx.call_args_list[2] + self.assertEqual( + ['repository', 'add-from-pm', _REPO_DIR, '-r', _REPO_NAME], + third_call[0][0]) + fourth_call = mock_ffx.call_args_list[3] + self.assertEqual([ + 'target', 'repository', 'register', '-r', _REPO_NAME, '--alias', + REPO_ALIAS + ], fourth_call[0][0]) + self.assertEqual(_TARGET, fourth_call[0][1]) + + @mock.patch('serve_repo.run_ffx_command') + def test_run_serve_cmd_stop(self, mock_ffx) -> None: + """Test |run_serve_cmd| function for stop.""" + + serve_repo.run_serve_cmd('stop', self._namespace) + self.assertEqual(mock_ffx.call_count, 3) + first_call = mock_ffx.call_args_list[0] + self.assertEqual( + ['target', 'repository', 'deregister', '-r', _REPO_NAME], + first_call[0][0]) + self.assertEqual(_TARGET, first_call[0][1]) + second_call = mock_ffx.call_args_list[1] + self.assertEqual(['repository', 'remove', _REPO_NAME], + second_call[0][0]) + third_call = mock_ffx.call_args_list[2] + self.assertEqual(['repository', 'server', 'stop'], third_call[0][0]) + + @mock.patch('serve_repo.run_serve_cmd') + def test_serve_repository(self, mock_serve) -> None: + """Tests |serve_repository| context manager.""" + + with serve_repo.serve_repository(self._namespace): + self.assertEqual(mock_serve.call_count, 1) + self.assertEqual(mock_serve.call_count, 2) + + def test_main_start_no_serve_repo_flag(self) -> None: + """Tests not specifying directory for start raises a ValueError.""" + + with mock.patch('sys.argv', ['serve_repo.py', 'start']): + with self.assertRaises(ValueError): + serve_repo.main() + + @mock.patch('serve_repo.run_serve_cmd') + def test_main_stop(self, mock_serve) -> None: + """Tests |main| function.""" + + with mock.patch('sys.argv', ['serve_repo.py', 'stop']): + serve_repo.main() + self.assertEqual(mock_serve.call_count, 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/fuchsia/test/start_emulator.py b/build/fuchsia/test/start_emulator.py new file mode 100755 index 000000000000..cd16505f47a7 --- /dev/null +++ b/build/fuchsia/test/start_emulator.py @@ -0,0 +1,83 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Provides a class for managing emulators.""" + +import argparse +import logging +import sys +import time + +from contextlib import AbstractContextManager + +from common import catch_sigterm, register_log_args +from ffx_emulator import FfxEmulator + + +def register_emulator_args(parser: argparse.ArgumentParser, + enable_graphics: bool = False) -> None: + """Register emulator specific arguments.""" + femu_args = parser.add_argument_group('emulator', + 'emulator startup arguments.') + femu_args.add_argument('--custom-image', + dest='product_bundle', + help='Backwards compatible flag that specifies an ' + 'image used for booting up the emulator.') + if enable_graphics: + femu_args.add_argument('--disable-graphics', + action='store_false', + dest='enable_graphics', + help='Start emulator in headless mode.') + else: + femu_args.add_argument('--enable-graphics', + action='store_true', + help='Start emulator with graphics.') + femu_args.add_argument( + '--hardware-gpu', + action='store_true', + help='Use host GPU hardware instead of Swiftshader.') + femu_args.add_argument( + '--product-bundle', + help='Specify a product bundle used for booting the ' + 'emulator. Defaults to the terminal product.') + femu_args.add_argument('--with-network', + action='store_true', + help='Run emulator with emulated nic via tun/tap.') + femu_args.add_argument('--everlasting', + action='store_true', + help='If the emulator should be long-living.') + + +def create_emulator_from_args( + args: argparse.Namespace) -> AbstractContextManager: + """Helper method for initializing an FfxEmulator class with parsed + arguments.""" + return FfxEmulator(args) + + +def main(): + """Stand-alone function for starting an emulator.""" + + catch_sigterm() + logging.basicConfig(level=logging.INFO) + parser = argparse.ArgumentParser() + register_emulator_args(parser, True) + register_log_args(parser) + args = parser.parse_args() + with create_emulator_from_args(args) as target_id: + logging.info( + 'Emulator successfully started. You can now run Chrome ' + 'Fuchsia tests with --target-id=%s to target this emulator.', + target_id) + try: + while True: + time.sleep(10000) + except KeyboardInterrupt: + logging.info('Ctrl-C received; shutting down the emulator.') + except SystemExit: + logging.info('SIGTERM received; shutting down the emulator.') + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/fuchsia/test/test_runner.py b/build/fuchsia/test/test_runner.py new file mode 100644 index 000000000000..a4a2f5bf3d30 --- /dev/null +++ b/build/fuchsia/test/test_runner.py @@ -0,0 +1,74 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Provides a base class for test running.""" + +import os +import subprocess + +from abc import ABC, abstractmethod +from argparse import Namespace +from typing import Dict, List, Optional + +from common import read_package_paths + + +class TestRunner(ABC): + """Base class that handles running a test.""" + + def __init__(self, + out_dir: str, + test_args: Namespace, + packages: List[str], + target_id: Optional[str] = None) -> None: + self._target_id = target_id + self._out_dir = out_dir + self._test_args = test_args + self._packages = packages + self._package_deps = None + + # TODO(crbug.com/1256503): Remove when all tests are converted to CFv2. + @staticmethod + def is_cfv2() -> bool: + """ + Returns True if packages are CFv2, False otherwise. 
Subclasses can
+        override this and return False if needed.
+        """
+
+        return True
+
+    @property
+    def package_deps(self) -> Dict[str, str]:
+        """
+        Returns:
+            A dictionary of the packages that |self._packages| depend on,
+            mapping each package name to the local path of its far file.
+        """
+
+        if not self._package_deps:
+            self._populate_package_deps()
+        return self._package_deps
+
+    def _populate_package_deps(self) -> None:
+        """Retrieve information for all packages |self._packages| depend on.
+        """
+
+        package_deps = {}
+
+        package_paths = []
+        for package in self._packages:
+            package_paths.extend(read_package_paths(self._out_dir, package))
+
+        for path in package_paths:
+            package_name = os.path.basename(path).replace('.far', '')
+            if package_name in package_deps:
+                assert path == package_deps[package_name]
+            package_deps[package_name] = path
+        self._package_deps = package_deps
+
+    @abstractmethod
+    def run_test(self) -> subprocess.Popen:
+        """
+        Returns:
+            A subprocess.Popen object that ran the test command.
+        """
diff --git a/build/fuchsia/test/test_server.py b/build/fuchsia/test/test_server.py
new file mode 100644
index 000000000000..c2ed3d23584a
--- /dev/null
+++ b/build/fuchsia/test/test_server.py
@@ -0,0 +1,130 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Test server setup."""
+
+import logging
+import os
+import sys
+import subprocess
+
+from typing import List, Optional, Tuple
+
+from common import DIR_SRC_ROOT, run_ffx_command
+from compatible_utils import get_ssh_prefix
+
+sys.path.append(os.path.join(DIR_SRC_ROOT, 'build', 'util', 'lib', 'common'))
+# pylint: disable=import-error,wrong-import-position
+import chrome_test_server_spawner
+# pylint: enable=import-error,wrong-import-position
+
+
+def port_forward(host_port_pair: str, host_port: int) -> int:
+    """Establishes a port forwarding SSH task to a localhost TCP endpoint
+    hosted at port |host_port|. Blocks until port forwarding is established.
+
+    Returns the remote port number."""
+
+    ssh_prefix = get_ssh_prefix(host_port_pair)
+
+    # Allow a tunnel to be established.
+    subprocess.run(ssh_prefix + ['echo', 'true'], check=True)
+
+    forward_cmd = [
+        '-O',
+        'forward',  # Send SSH mux control signal.
+        '-R',
+        '0:localhost:%d' % host_port,
+        '-v',  # Get forwarded port info from stderr.
+        '-NT'  # Don't execute command; don't allocate terminal.
+    ]
+    forward_proc = subprocess.run(ssh_prefix + forward_cmd,
+                                  capture_output=True,
+                                  check=False,
+                                  text=True)
+    if forward_proc.returncode != 0:
+        raise Exception(
+            'Got an error code when requesting port forwarding: %d' %
+            forward_proc.returncode)
+
+    output = forward_proc.stdout
+    parsed_port = int(output.splitlines()[0].strip())
+    logging.debug('Port forwarding established (local=%d, device=%d)',
+                  host_port, parsed_port)
+    return parsed_port
+
+
+# Disable pylint errors since the subclass is not from this directory.
+# pylint: disable=invalid-name,missing-function-docstring
+class SSHPortForwarder(chrome_test_server_spawner.PortForwarder):
+    """Implementation of chrome_test_server_spawner.PortForwarder that uses
+    SSH's remote port forwarding feature to forward ports."""
+
+    def __init__(self, host_port_pair: str) -> None:
+        self._host_port_pair = host_port_pair
+
+        # Maps the host (server) port to the device port number.
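+        # For example, after Map() forwards host port 8000 to a
+        # device-chosen ephemeral port, this might hold {8000: 45678}
+        # (illustrative values).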
+ self._port_mapping = {} + + def Map(self, port_pairs: List[Tuple[int, int]]) -> None: + for p in port_pairs: + _, host_port = p + self._port_mapping[host_port] = \ + port_forward(self._host_port_pair, host_port) + + def GetDevicePortForHostPort(self, host_port: int) -> int: + return self._port_mapping[host_port] + + def Unmap(self, device_port: int) -> None: + for host_port, entry in self._port_mapping.items(): + if entry == device_port: + ssh_prefix = get_ssh_prefix(self._host_port_pair) + unmap_cmd = [ + '-NT', '-O', 'cancel', '-R', + '0:localhost:%d' % host_port + ] + ssh_proc = subprocess.run(ssh_prefix + unmap_cmd, check=False) + if ssh_proc.returncode != 0: + raise Exception('Error %d when unmapping port %d' % + (ssh_proc.returncode, device_port)) + del self._port_mapping[host_port] + return + + raise Exception('Unmap called for unknown port: %d' % device_port) + + +# pylint: enable=invalid-name,missing-function-docstring + + +def setup_test_server(target_id: Optional[str], test_concurrency: int)\ + -> Tuple[chrome_test_server_spawner.SpawningServer, str]: + """Provisions a test server and configures |target_id| to use it. + + Args: + target_id: The target to which port forwarding to the test server will + be established. + test_concurrency: The number of parallel test jobs that will be run. + + Returns a tuple of a SpawningServer object and the local url to use on + |target_id| to reach the test server.""" + + logging.debug('Starting test server.') + + host_port_pair = run_ffx_command(('target', 'get-ssh-address'), + target_id, + capture_output=True).stdout.strip() + + # The TestLauncher can launch more jobs than the limit specified with + # --test-launcher-jobs so the max number of spawned test servers is set to + # twice that limit here. See https://crbug.com/913156#c19. + spawning_server = chrome_test_server_spawner.SpawningServer( + 0, SSHPortForwarder(host_port_pair), test_concurrency * 2) + + forwarded_port = port_forward(host_port_pair, spawning_server.server_port) + spawning_server.Start() + + logging.debug('Test server listening for connections (port=%d)', + spawning_server.server_port) + logging.debug('Forwarded port is %d', forwarded_port) + + return (spawning_server, 'http://localhost:%d' % forwarded_port) diff --git a/build/fuchsia/test/test_server_unittests.py b/build/fuchsia/test/test_server_unittests.py new file mode 100755 index 000000000000..f601884956e1 --- /dev/null +++ b/build/fuchsia/test/test_server_unittests.py @@ -0,0 +1,84 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""File for testing test_server.py.""" + +import unittest +import unittest.mock as mock + +import test_server + +_HOST_PORT = 44444 +_HOST_PORT_PAIR = '127.0.0.1:33333' +_SERVER_PORT = 55555 + + +class TestServerTest(unittest.TestCase): + """Unittests for test_server.py.""" + + def setUp(self) -> None: + self._subprocess_patcher = mock.patch('test_server.subprocess.run') + self._log_patcher = mock.patch('test_server.logging.debug') + self._subprocess_mock = self._subprocess_patcher.start() + self._log_mock = self._log_patcher.start() + self.addCleanup(self._log_mock.stop) + self.addCleanup(self._subprocess_mock.stop) + + def test_ssh_port_forwarder(self) -> None: + """Test SSHPortForwarder.""" + + port_pair = (_HOST_PORT, _SERVER_PORT) + cmd_mock = mock.Mock() + cmd_mock.returncode = 0 + cmd_mock.stdout = str(port_pair[0]) + self._subprocess_mock.return_value = cmd_mock + + forwarder = test_server.SSHPortForwarder(_HOST_PORT_PAIR) + + # Unmap should raise an exception if no ports are mapped. + with self.assertRaises(Exception): + forwarder.Unmap(port_pair[0]) + + forwarder.Map([port_pair]) + self.assertEqual(self._subprocess_mock.call_count, 2) + self.assertEqual(forwarder.GetDevicePortForHostPort(port_pair[1]), + port_pair[0]) + + # Unmap should also raise an exception if the unmap command fails. + self._subprocess_mock.reset_mock() + cmd_mock.returncode = 1 + with self.assertRaises(Exception): + forwarder.Unmap(port_pair[0]) + self.assertEqual(self._subprocess_mock.call_count, 1) + + self._subprocess_mock.reset_mock() + cmd_mock.returncode = 0 + forwarder.Unmap(port_pair[0]) + self.assertEqual(self._subprocess_mock.call_count, 1) + + def test_port_forward_exception(self) -> None: + """Tests that exception is raised if |port_forward| command fails.""" + + cmd_mock = mock.Mock() + cmd_mock.returncode = 1 + self._subprocess_mock.return_value = cmd_mock + with self.assertRaises(Exception): + test_server.port_forward(_HOST_PORT_PAIR, _HOST_PORT) + + @mock.patch('test_server.chrome_test_server_spawner.SpawningServer') + @mock.patch('test_server.port_forward') + def test_setup_test_server(self, forward_mock, server_mock) -> None: + """Test |setup_test_server|.""" + + forward_mock.return_value = _HOST_PORT + server = test_server.chrome_test_server_spawner.SpawningServer + server.Start = mock.Mock() + server_mock.return_value = server + with mock.patch('test_server.run_ffx_command'): + _, url = test_server.setup_test_server(_HOST_PORT_PAIR, 4) + self.assertTrue(str(_HOST_PORT) in url) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/fuchsia/test_runner.py b/build/fuchsia/test_runner.py deleted file mode 100755 index afecdddea1d1..000000000000 --- a/build/fuchsia/test_runner.py +++ /dev/null @@ -1,257 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Deploys and runs a test package on a Fuchsia target.""" - -import argparse -import os -import runner_logs -import sys - -from common_args import AddCommonArgs, AddTargetSpecificArgs, \ - ConfigureLogging, GetDeploymentTargetForArgs -from net_test_server import SetupTestServer -from run_test_package import RunTestPackage, RunTestPackageArgs, SystemLogReader -from runner_exceptions import HandleExceptionAndReturnExitCode -from runner_logs import RunnerLogManager -from symbolizer import BuildIdsPaths - -DEFAULT_TEST_SERVER_CONCURRENCY = 4 - -TEST_DATA_DIR = '/tmp' -TEST_FILTER_PATH = TEST_DATA_DIR + '/test_filter.txt' -TEST_LLVM_PROFILE_PATH = TEST_DATA_DIR + '/llvm-profile' -TEST_PERF_RESULT_PATH = TEST_DATA_DIR + '/test_perf_summary.json' -TEST_RESULT_PATH = TEST_DATA_DIR + '/test_summary.json' - -TEST_REALM_NAME = 'chromium_tests' - - -def AddTestExecutionArgs(arg_parser): - test_args = arg_parser.add_argument_group('testing', - 'Test execution arguments') - test_args.add_argument('--gtest_filter', - help='GTest filter to use in place of any default.') - test_args.add_argument( - '--gtest_repeat', - help='GTest repeat value to use. This also disables the ' - 'test launcher timeout.') - test_args.add_argument( - '--test-launcher-retry-limit', - help='Number of times that test suite will retry failing ' - 'tests. This is multiplicative with --gtest_repeat.') - test_args.add_argument('--test-launcher-shard-index', - type=int, - default=os.environ.get('GTEST_SHARD_INDEX'), - help='Index of this instance amongst swarming shards.') - test_args.add_argument('--test-launcher-total-shards', - type=int, - default=os.environ.get('GTEST_TOTAL_SHARDS'), - help='Total number of swarming shards of this suite.') - test_args.add_argument('--gtest_break_on_failure', - action='store_true', - default=False, - help='Should GTest break on failure; useful with ' - '--gtest_repeat.') - test_args.add_argument('--single-process-tests', - action='store_true', - default=False, - help='Runs the tests and the launcher in the same ' - 'process. Useful for debugging.') - test_args.add_argument('--test-launcher-batch-limit', - type=int, - help='Sets the limit of test batch to run in a single ' - 'process.') - # --test-launcher-filter-file is specified relative to --out-dir, - # so specifying type=os.path.* will break it. - test_args.add_argument( - '--test-launcher-filter-file', - default=None, - help='Override default filter file passed to target test ' - 'process. 
Set an empty path to disable filtering.') - test_args.add_argument('--test-launcher-jobs', - type=int, - help='Sets the number of parallel test jobs.') - test_args.add_argument('--test-launcher-summary-output', - help='Where the test launcher will output its json.') - test_args.add_argument('--enable-test-server', - action='store_true', - default=False, - help='Enable Chrome test server spawner.') - test_args.add_argument( - '--test-launcher-bot-mode', - action='store_true', - default=False, - help='Informs the TestLauncher to that it should enable ' - 'special allowances for running on a test bot.') - test_args.add_argument('--isolated-script-test-output', - help='If present, store test results on this path.') - test_args.add_argument( - '--isolated-script-test-perf-output', - help='If present, store chartjson results on this path.') - test_args.add_argument('--use-run-test-component', - default=False, - action='store_true', - help='Run the test package hermetically using ' - 'run-test-component, rather than run.') - test_args.add_argument( - '--code-coverage', - default=False, - action='store_true', - help='Gather code coverage information and place it in ' - 'the output directory.') - test_args.add_argument('--code-coverage-dir', - default=os.getcwd(), - help='Directory to place code coverage information. ' - 'Only relevant when --code-coverage set to true. ' - 'Defaults to current directory.') - test_args.add_argument('--child-arg', - action='append', - help='Arguments for the test process.') - test_args.add_argument('child_args', - nargs='*', - help='Arguments for the test process.') - - -def main(): - parser = argparse.ArgumentParser() - AddTestExecutionArgs(parser) - AddCommonArgs(parser) - AddTargetSpecificArgs(parser) - args = parser.parse_args() - - # Flag out_dir is required for tests launched with this script. - if not args.out_dir: - raise ValueError("out-dir must be specified.") - - # Code coverage uses runtests, which calls run_test_component. - if args.code_coverage: - args.use_run_test_component = True - - ConfigureLogging(args) - - child_args = [] - if args.test_launcher_shard_index != None: - child_args.append( - '--test-launcher-shard-index=%d' % args.test_launcher_shard_index) - if args.test_launcher_total_shards != None: - child_args.append( - '--test-launcher-total-shards=%d' % args.test_launcher_total_shards) - if args.single_process_tests: - child_args.append('--single-process-tests') - if args.test_launcher_bot_mode: - child_args.append('--test-launcher-bot-mode') - if args.test_launcher_batch_limit: - child_args.append('--test-launcher-batch-limit=%d' % - args.test_launcher_batch_limit) - - # Only set --test-launcher-jobs if the caller specifies it, in general. - # If the caller enables the test-server then we need to launch the right - # number of instances to match the maximum number of parallel test jobs, so - # in that case we set --test-launcher-jobs based on the number of CPU cores - # specified for the emulator to use. 
- test_concurrency = None - if args.test_launcher_jobs: - test_concurrency = args.test_launcher_jobs - elif args.enable_test_server: - if args.device == 'device': - test_concurrency = DEFAULT_TEST_SERVER_CONCURRENCY - else: - test_concurrency = args.cpu_cores - if test_concurrency: - child_args.append('--test-launcher-jobs=%d' % test_concurrency) - - if args.gtest_filter: - child_args.append('--gtest_filter=' + args.gtest_filter) - if args.gtest_repeat: - child_args.append('--gtest_repeat=' + args.gtest_repeat) - child_args.append('--test-launcher-timeout=-1') - if args.test_launcher_retry_limit: - child_args.append( - '--test-launcher-retry-limit=' + args.test_launcher_retry_limit) - if args.gtest_break_on_failure: - child_args.append('--gtest_break_on_failure') - if args.test_launcher_summary_output: - child_args.append('--test-launcher-summary-output=' + TEST_RESULT_PATH) - if args.isolated_script_test_output: - child_args.append('--isolated-script-test-output=' + TEST_RESULT_PATH) - if args.isolated_script_test_perf_output: - child_args.append('--isolated-script-test-perf-output=' + - TEST_PERF_RESULT_PATH) - - if args.child_arg: - child_args.extend(args.child_arg) - if args.child_args: - child_args.extend(args.child_args) - - test_realms = [] - if args.use_run_test_component: - test_realms = [TEST_REALM_NAME] - - try: - with GetDeploymentTargetForArgs(args) as target, \ - SystemLogReader() as system_logger, \ - RunnerLogManager(args.runner_logs_dir, BuildIdsPaths(args.package)): - target.Start() - - if args.system_log_file and args.system_log_file != '-': - system_logger.Start(target, args.package, args.system_log_file) - - if args.test_launcher_filter_file: - target.PutFile(args.test_launcher_filter_file, - TEST_FILTER_PATH, - for_package=args.package_name, - for_realms=test_realms) - child_args.append('--test-launcher-filter-file=' + TEST_FILTER_PATH) - - test_server = None - if args.enable_test_server: - assert test_concurrency - test_server = SetupTestServer(target, test_concurrency, - args.package_name, test_realms) - - run_package_args = RunTestPackageArgs.FromCommonArgs(args) - if args.use_run_test_component: - run_package_args.test_realm_label = TEST_REALM_NAME - run_package_args.use_run_test_component = True - returncode = RunTestPackage(args.out_dir, target, args.package, - args.package_name, child_args, - run_package_args) - - if test_server: - test_server.Stop() - - if args.code_coverage: - # Copy all the files in the profile directory. /* is used instead - # of recursively copying due to permission issues for the latter. 
- target.GetFile(TEST_LLVM_PROFILE_PATH + '/*', args.code_coverage_dir) - - if args.test_launcher_summary_output: - target.GetFile(TEST_RESULT_PATH, - args.test_launcher_summary_output, - for_package=args.package_name, - for_realms=test_realms) - - if args.isolated_script_test_output: - target.GetFile(TEST_RESULT_PATH, - args.isolated_script_test_output, - for_package=args.package_name, - for_realms=test_realms) - - if args.isolated_script_test_perf_output: - target.GetFile(TEST_PERF_RESULT_PATH, - args.isolated_script_test_perf_output, - for_package=args.package_name, - for_realms=test_realms) - - return returncode - - except: - return HandleExceptionAndReturnExitCode() - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/fuchsia/update_images.py b/build/fuchsia/update_images.py index 79b8e49d8643..5251f98e4489 100755 --- a/build/fuchsia/update_images.py +++ b/build/fuchsia/update_images.py @@ -1,30 +1,95 @@ -#!/usr/bin/env python -# Copyright 2020 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -"""Updates the Fuchsia SDK to the given revision. Should be used in a 'hooks_os' -entry so that it only runs when .gclient's target_os includes 'fuchsia'.""" +"""Updates the Fuchsia images to the given revision. Should be used in a +'hooks_os' entry so that it only runs when .gclient's target_os includes +'fuchsia'.""" import argparse import itertools import logging import os import re -import shutil import subprocess import sys -import tarfile +from typing import Dict, Optional -from common import GetHostOsFromPlatform, GetHostArchFromPlatform, \ - DIR_SOURCE_ROOT, IMAGES_ROOT -from update_sdk import DownloadAndUnpackFromCloudStorage, \ - GetOverrideCloudStorageBucket, GetSdkHash, \ - MakeCleanDirectory, SDK_SIGNATURE_FILE +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + 'test'))) +from common import DIR_SRC_ROOT, IMAGES_ROOT, get_host_os, \ + make_clean_directory -def GetSdkSignature(sdk_hash, boot_images): - return 'gn:{sdk_hash}:{boot_images}:'.format(sdk_hash=sdk_hash, - boot_images=boot_images) +from gcs_download import DownloadAndUnpackFromCloudStorage + +from update_sdk import GetSDKOverrideGCSPath + +IMAGE_SIGNATURE_FILE = '.hash' + + +# TODO(crbug.com/1138433): Investigate whether we can deprecate +# use of sdk_bucket.txt. 
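+# When present, sdk-bucket.txt is expected to hold a single GCS bucket name
+# on one line, e.g. "fuchsia-sdk" (illustrative value).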
+def GetOverrideCloudStorageBucket(): + """Read bucket entry from sdk_bucket.txt""" + return ReadFile('sdk-bucket.txt').strip() + + +def ReadFile(filename): + """Read a file in this directory.""" + with open(os.path.join(os.path.dirname(__file__), filename), 'r') as f: + return f.read() + + +def StrExpansion(): + return lambda str_value: str_value + + +def VarLookup(local_scope): + return lambda var_name: local_scope['vars'][var_name] + + +def GetImageHashList(bucket): + """Read filename entries from sdk-hash-files.list (one per line), substitute + {platform} in each entry if present, and read from each filename.""" + assert (get_host_os() == 'linux') + filenames = [ + line.strip() for line in ReadFile('sdk-hash-files.list').replace( + '{platform}', 'linux_internal').splitlines() + ] + image_hashes = [ReadFile(filename).strip() for filename in filenames] + return image_hashes + + +def ParseDepsDict(deps_content): + local_scope = {} + global_scope = { + 'Str': StrExpansion(), + 'Var': VarLookup(local_scope), + 'deps_os': {}, + } + exec(deps_content, global_scope, local_scope) + return local_scope + + +def ParseDepsFile(filename): + with open(filename, 'rb') as f: + deps_content = f.read() + return ParseDepsDict(deps_content) + + +def GetImageHash(bucket): + """Gets the hash identifier of the newest generation of images.""" + if bucket == 'fuchsia-sdk': + hashes = GetImageHashList(bucket) + return max(hashes) + deps_file = os.path.join(DIR_SRC_ROOT, 'DEPS') + return ParseDepsFile(deps_file)['vars']['fuchsia_version'].split(':')[1] + + +def GetImageSignature(image_hash, boot_images): + return 'gn:{image_hash}:{boot_images}:'.format(image_hash=image_hash, + boot_images=boot_images) def GetAllImages(boot_image_names): @@ -48,7 +113,7 @@ def GetAllImages(boot_image_names): return images_to_download -def DownloadSdkBootImages(bucket, sdk_hash, boot_image_names, image_root_dir): +def DownloadBootImages(bucket, image_hash, boot_image_names, image_root_dir): images_to_download = GetAllImages(boot_image_names) for image_to_download in images_to_download: device_type = image_to_download[0] @@ -57,23 +122,72 @@ def DownloadSdkBootImages(bucket, sdk_hash, boot_image_names, image_root_dir): if os.path.exists(image_output_dir): continue - logging.info('Downloading Fuchsia boot images for %s.%s...' % - (device_type, arch)) - if bucket == 'fuchsia-sdk': - images_tarball_url = 'gs://{bucket}/development/{sdk_hash}/images/'\ - '{device_type}.{arch}.tgz'.format( - bucket=bucket, sdk_hash=sdk_hash, - device_type=device_type, arch=arch) + logging.info('Downloading Fuchsia boot images for %s.%s...', device_type, + arch) + + # Legacy images use different naming conventions. See fxbug.dev/85552. + legacy_delimiter_device_types = ['qemu', 'generic'] + if bucket == 'fuchsia-sdk' or \ + device_type not in legacy_delimiter_device_types: + type_arch_connector = '.' 
else:
-      images_tarball_url = 'gs://{bucket}/development/{sdk_hash}/images/'\
-          '{device_type}-{arch}.tgz'.format(
-              bucket=bucket, sdk_hash=sdk_hash,
-              device_type=device_type, arch=arch)
-    DownloadAndUnpackFromCloudStorage(images_tarball_url, image_output_dir)
+      type_arch_connector = '-'
+
+    images_tarball_url = 'gs://{bucket}/development/{image_hash}/images/'\
+        '{device_type}{type_arch_connector}{arch}.tgz'.format(
+            bucket=bucket, image_hash=image_hash, device_type=device_type,
+            type_arch_connector=type_arch_connector, arch=arch)
+    try:
+      DownloadAndUnpackFromCloudStorage(images_tarball_url, image_output_dir)
+    except subprocess.CalledProcessError:
+      logging.exception('Failed to download image %s from URL: %s',
+                        image_to_download, images_tarball_url)
+      raise
+
+
+def _GetImageOverrideInfo() -> Optional[Dict[str, str]]:
+  """Get the bucket location from sdk_override.txt."""
+  location = GetSDKOverrideGCSPath()
+  if not location:
+    return None
+
+  m = re.match(r'gs://([^/]+)/development/([^/]+)/?(?:sdk)?', location)
+  if not m:
+    raise ValueError('Badly formatted image override location %s' % location)
+
+  return {
+      'bucket': m.group(1),
+      'image_hash': m.group(2),
+  }
+
+
+def GetImageLocationInfo(default_bucket: str,
+                         allow_override: bool = True) -> Dict[str, str]:
+  """Figures out where to pull the image from.
+  Defaults to the given bucket, deriving the image hash from checked-in files.
+  If sdk_override.txt exists (and overrides are allowed), it is used instead.
 
-def GetNewSignature(sdk_hash, boot_images):
-  return GetSdkSignature(sdk_hash, boot_images)
+  Args:
+    default_bucket: the bucket to use when no override is present.
+    allow_override: whether sdk_override.txt may be consulted.
+
+  Returns:
+    A dictionary containing the bucket and image_hash.
+  """
+  # If sdk_override.txt exists (and is allowed), use that bucket's image.
+  if allow_override:
+    override = _GetImageOverrideInfo()
+    if override:
+      return override
+
+  # Use the bucket in sdk-bucket.txt if an entry exists.
+  # Otherwise use the default bucket.
+  bucket = GetOverrideCloudStorageBucket() or default_bucket
+  return {
+      'bucket': bucket,
+      'image_hash': GetImageHash(bucket),
+  }
 
 
 def main():
@@ -98,6 +212,12 @@ def main():
       '--image-root-dir',
       default=IMAGES_ROOT,
       help='Specify the root directory of the downloaded images. Optional')
+  parser.add_argument(
+      '--allow-override',
+      default=True,
+      type=lambda v: v.lower() in ('true', '1', 'yes'),
+      help='Whether sdk_override.txt can be used for fetching the image, if '
+      'it exists.')
   args = parser.parse_args()
 
   logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
@@ -106,34 +226,38 @@ def main():
   if not args.boot_images:
     return 0
 
-  # Check whether there's SDK support for this platform.
-  GetHostOsFromPlatform()
+  # Check whether there's Fuchsia support for this platform.
+  get_host_os()
 
-  # Use the bucket in sdk-bucket.txt if an entry exists.
-  # Otherwise use the default bucket.
-  bucket = GetOverrideCloudStorageBucket() or args.default_bucket
+  image_info = GetImageLocationInfo(args.default_bucket, args.allow_override)
 
-  sdk_hash = GetSdkHash(bucket)
-  if not sdk_hash:
+  bucket = image_info['bucket']
+  image_hash = image_info['image_hash']
+
+  if not image_hash:
     return 1
 
-  signature_filename = os.path.join(args.image_root_dir, SDK_SIGNATURE_FILE)
+  signature_filename = os.path.join(args.image_root_dir, IMAGE_SIGNATURE_FILE)
   current_signature = (open(signature_filename, 'r').read().strip()
                        if os.path.exists(signature_filename) else '')
-  new_signature = GetNewSignature(sdk_hash, args.boot_images)
+  new_signature = GetImageSignature(image_hash, args.boot_images)
   if current_signature != new_signature:
-    logging.info('Downloading Fuchsia images %s...' % sdk_hash)
-    MakeCleanDirectory(args.image_root_dir)
+    logging.info('Downloading Fuchsia images %s from bucket %s...', image_hash,
+                 bucket)
+    make_clean_directory(args.image_root_dir)
 
     try:
-      DownloadSdkBootImages(bucket, sdk_hash, args.boot_images,
-                            args.image_root_dir)
+      DownloadBootImages(bucket, image_hash, args.boot_images,
+                         args.image_root_dir)
       with open(signature_filename, 'w') as f:
         f.write(new_signature)
     except subprocess.CalledProcessError as e:
-      logging.error(("command '%s' failed with status %d.%s"), " ".join(e.cmd),
-                    e.returncode, " Details: " + e.output if e.output else "")
+      logging.exception("command '%s' failed with status %d.%s",
+                        ' '.join(e.cmd), e.returncode,
+                        ' Details: ' + e.output if e.output else '')
+      raise
+  else:
+    logging.info('Signatures matched! Got %s', new_signature)
 
   return 0
diff --git a/build/fuchsia/update_images_test.py b/build/fuchsia/update_images_test.py
new file mode 100755
index 000000000000..f5be774cd316
--- /dev/null
+++ b/build/fuchsia/update_images_test.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env vpython3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
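The override resolution exercised by the tests below can be illustrated in
isolation. A small, self-contained sketch of the gs:// path handling done by
_GetImageOverrideInfo above (the bucket and hash values are made up):

import re

def parse_override_location(location):
  # Accepts gs://<bucket>/development/<image-hash>, optionally followed by
  # '/' or '/sdk', and splits it into its bucket and image-hash components.
  m = re.match(r'gs://([^/]+)/development/([^/]+)/?(?:sdk)?', location)
  if not m:
    raise ValueError('Badly formatted image override location %s' % location)
  return {'bucket': m.group(1), 'image_hash': m.group(2)}

assert parse_override_location('gs://my-bucket/development/my-hash/sdk') == {
    'bucket': 'my-bucket',
    'image_hash': 'my-hash',
}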
+ +import unittest +from unittest import mock + +from parameterized import parameterized + +from update_images import _GetImageOverrideInfo +from update_images import GetImageLocationInfo + + +@mock.patch('update_images.GetSDKOverrideGCSPath') +class TestGetImageOverrideInfo(unittest.TestCase): + def testLocationIsNone(self, mock_sdk_loc): + mock_sdk_loc.return_value = None + + actual = _GetImageOverrideInfo() + self.assertIsNone(actual) + + def testBadLocationStr(self, mock_sdk_loc): + mock_sdk_loc.return_value = 'bad-format-string' + + with self.assertRaises(Exception): + _GetImageOverrideInfo() + + @parameterized.expand([ + ('gs://my-bucket/development/my-hash/sdk', { + 'bucket': 'my-bucket', + 'image_hash': 'my-hash' + }), + ('gs://my-bucket/development/my-hash', { + 'bucket': 'my-bucket', + 'image_hash': 'my-hash' + }), + ('gs://my-bucket/development/my-hash/', { + 'bucket': 'my-bucket', + 'image_hash': 'my-hash' + }), + ]) + def testValidLocation(self, mock_sdk_loc, in_path, expected): + mock_sdk_loc.return_value = in_path + + actual = _GetImageOverrideInfo() + self.assertEqual(actual, expected) + + +@mock.patch('update_images.GetImageHash') +@mock.patch('update_images.GetOverrideCloudStorageBucket') +@mock.patch('update_images._GetImageOverrideInfo') +class TestGetImageLocationInfo(unittest.TestCase): + def testNoOverride(self, mock_image_override, mock_override_bucket, + mock_image_hash): + mock_image_override.return_value = None + mock_override_bucket.return_value = None + mock_image_hash.return_value = 'image-hash' + + actual = GetImageLocationInfo('my-bucket') + self.assertEqual(actual, { + 'bucket': 'my-bucket', + 'image_hash': 'image-hash', + }) + + def testOverride(self, mock_image_override, mock_override_bucket, + mock_image_hash): + override_info = { + 'bucket': 'override-bucket', + 'image_hash': 'override-hash', + } + mock_image_override.return_value = override_info + mock_override_bucket.return_value = None + mock_image_hash.return_value = 'image-hash' + + actual = GetImageLocationInfo('my-bucket') + self.assertEqual(actual, override_info) + + def testNoAllowOverride(self, mock_image_override, mock_override_bucket, + mock_image_hash): + override_info = { + 'bucket': 'override-bucket', + 'image_hash': 'override-hash', + } + mock_image_override.return_value = override_info + mock_override_bucket.return_value = None + mock_image_hash.return_value = 'image-hash' + + actual = GetImageLocationInfo('my-bucket', allow_override=False) + self.assertEqual(actual, { + 'bucket': 'my-bucket', + 'image_hash': 'image-hash', + }) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/fuchsia/update_product_bundles.py b/build/fuchsia/update_product_bundles.py new file mode 100755 index 000000000000..79ad3970964e --- /dev/null +++ b/build/fuchsia/update_product_bundles.py @@ -0,0 +1,359 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Updates the Fuchsia product bundles to the given revision. 
Should be used +in a 'hooks_os' entry so that it only runs when .gclient's target_os includes +'fuchsia'.""" + +import argparse +import json +import logging +import os +import re +import subprocess +import sys + +from contextlib import ExitStack + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + 'test'))) + +import common +import ffx_integration + +_PRODUCT_BUNDLES = [ + 'core.x64-dfv2', + 'terminal.qemu-arm64', + 'terminal.qemu-x64', + 'workstation_eng.chromebook-x64', + 'workstation_eng.chromebook-x64-dfv2', + 'workstation_eng.qemu-x64', + 'workstation_eng.x64', +] + +# TODO(crbug/1361089): Remove when the old scripts have been deprecated. +_IMAGE_TO_PRODUCT_BUNDLE = { + 'core.x64-dfv2-release': 'core.x64-dfv2', + 'qemu.arm64': 'terminal.qemu-arm64', + 'qemu.x64': 'terminal.qemu-x64', + 'workstation_eng.chromebook-x64-dfv2-release': + 'workstation_eng.chromebook-x64-dfv2', + 'workstation_eng.chromebook-x64-release': 'workstation_eng.chromebook-x64', + 'workstation_eng.qemu-x64-release': 'workstation_eng.qemu-x64', +} + + +_PRODUCT_BUNDLE_FIX_INSTRUCTIONS = ( + 'This could be because an earlier version of the product bundle was not ' + 'properly removed. Run |ffx product-bundle list| and |ffx repository list|,' + ' remove the available product bundles listed using ' + '|ffx product-bundle remove| and |ffx repository remove|, ' + f'remove the directory {common.IMAGES_ROOT} and rerun hooks/this script.') + + +# TODO(crbug/1361089): Remove when the old scripts have been deprecated. +def convert_to_product_bundle(images_list): + """Convert image names in the SDK to product bundle names.""" + + product_bundle_list = [] + for image in images_list: + if image in _IMAGE_TO_PRODUCT_BUNDLE: + logging.warning(f'Image name {image} has been deprecated. Use ' + f'{_IMAGE_TO_PRODUCT_BUNDLE.get(image)} instead.') + product_bundle_list.append(_IMAGE_TO_PRODUCT_BUNDLE.get(image, image)) + return product_bundle_list + + +def get_hash_from_sdk(): + """Retrieve version info from the SDK.""" + + version_file = os.path.join(common.SDK_ROOT, 'meta', 'manifest.json') + if not os.path.exists(version_file): + raise RuntimeError('Could not detect version file. Make sure the SDK has ' + 'been downloaded') + with open(version_file, 'r') as f: + return json.load(f)['id'] + + +def remove_repositories(repo_names_to_remove): + """Removes given repos from repo list. + Repo MUST be present in list to succeed. + + Args: + repo_names_to_remove: List of repo names (as strings) to remove. + """ + for repo_name in repo_names_to_remove: + common.run_ffx_command(('repository', 'remove', repo_name), check=True) + + +def get_repositories(): + """Lists repositories that are available on disk. + + Also prunes repositories that are listed, but do not have an actual packages + directory. + + Returns: + List of dictionaries containing info about the repositories. They have the + following structure: + { + 'name': , + 'spec': { + 'type': , + 'path': + }, + } + """ + + repos = json.loads( + common.run_ffx_command(('--machine', 'json', 'repository', 'list'), + check=True, + capture_output=True).stdout.strip()) + to_prune = set() + sdk_root_abspath = os.path.abspath(os.path.dirname(common.SDK_ROOT)) + for repo in repos: + # Confirm the path actually exists. If not, prune list. + # Also assert the product-bundle repository is for the current repo + # (IE within the same directory). 
+ if not os.path.exists(repo['spec']['path']): + to_prune.add(repo['name']) + + if not repo['spec']['path'].startswith(sdk_root_abspath): + to_prune.add(repo['name']) + + repos = [repo for repo in repos if repo['name'] not in to_prune] + + remove_repositories(to_prune) + return repos + + +def update_repositories_list(): + """Used to prune stale repositories.""" + get_repositories() + + +def remove_product_bundle(product_bundle): + """Removes product-bundle given.""" + common.run_ffx_command(('product-bundle', 'remove', '-f', product_bundle)) + + +def get_product_bundle_urls(): + """Retrieves URLs of available product-bundles. + + Returns: + List of dictionaries of structure, indicating whether the product-bundle + has been downloaded. + { + 'url': , + 'downloaded': + } + """ + # TODO(fxb/115328): Replaces with JSON API when available. + bundles = common.run_ffx_command(('product-bundle', 'list'), + capture_output=True).stdout.strip() + urls = [ + line.strip() for line in bundles.splitlines() if 'gs://fuchsia' in line + ] + structured_urls = [] + for url in urls: + downloaded = False + if '*' in url: + downloaded = True + url = url.split(' ')[1] + structured_urls.append({'downloaded': downloaded, 'url': url.strip()}) + return structured_urls + + +def keep_product_bundles_by_sdk_version(sdk_version): + """Prunes product bundles not containing the sdk_version given.""" + urls = get_product_bundle_urls() + for url in urls: + if url['downloaded'] and sdk_version not in url['url']: + remove_product_bundle(url['url']) + + +def get_product_bundles(): + """Lists all downloaded product-bundles for the given SDK. + + Cross-references the repositories with downloaded packages and the stated + downloaded product-bundles to validate whether or not a product-bundle is + present. Prunes invalid product-bundles with each call as well. + + Returns: + List of strings of product-bundle names downloaded and that FFX is aware + of. + """ + downloaded_bundles = [] + + for url in get_product_bundle_urls(): + if url['downloaded']: + # The product is separated by a # + product = url['url'].split('#') + downloaded_bundles.append(product[1]) + + repos = get_repositories() + + # Some repo names do not match product-bundle names due to underscores. + # Normalize them both. + repo_names = set([repo['name'].replace('-', '_') for repo in repos]) + + def bundle_is_active(name): + # Returns True if the product-bundle named `name` is present in a package + # repository (assuming it is downloaded already); otherwise, removes the + # product-bundle and returns False. + if name.replace('-', '_') in repo_names: + return True + + remove_product_bundle(name) + return False + + return list(filter(bundle_is_active, downloaded_bundles)) + + +def download_product_bundle(product_bundle, download_config): + """Download product bundles using the SDK.""" + # This also updates the repository list, in case it is stale. + update_repositories_list() + + try: + common.run_ffx_command( + ('product-bundle', 'get', product_bundle, '--force-repo'), + configs=download_config) + except subprocess.CalledProcessError as cpe: + logging.error('Product bundle download has failed. ' + + _PRODUCT_BUNDLE_FIX_INSTRUCTIONS) + raise + + +def get_current_signature(): + """Determines the SDK version of the product-bundles associated with the SDK. + + Parses this information from the URLs of the product-bundle. + + Returns: + An SDK version string, or None if no product-bundle versions are downloaded. 
+  """
+  product_bundles = get_product_bundles()
+  if not product_bundles:
+    logging.info('No product bundles - signature will default to None')
+    return None
+  product_bundle_urls = get_product_bundle_urls()
+
+  # Extract the version component from each URL; they should all agree.
+  signatures = set()
+  for bundle in product_bundle_urls:
+    m = re.search(r'/(\d+\.\d+\.\d+\.\d+|\d+)/', bundle['url'])
+    assert m, 'Must have a signature in each URL'
+    signatures.add(m.group(1))
+
+  if len(signatures) > 1:
+    raise RuntimeError('Found more than one product signature. ' +
+                       _PRODUCT_BUNDLE_FIX_INSTRUCTIONS)
+
+  return next(iter(signatures)) if signatures else None
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--verbose',
+                      '-v',
+                      action='store_true',
+                      help='Enable debug-level logging.')
+  parser.add_argument(
+      'product_bundles',
+      type=str,
+      help='List of product bundles to download, represented as a comma '
+      'separated list.')
+  args = parser.parse_args()
+
+  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
+
+  # Check whether there's Fuchsia support for this platform.
+  common.get_host_os()
+
+  new_product_bundles = convert_to_product_bundle(
+      args.product_bundles.split(','))
+  logging.info('Searching for the following product bundles: %s',
+               str(new_product_bundles))
+  for pb in new_product_bundles:
+    if pb not in _PRODUCT_BUNDLES:
+      raise ValueError(f'{pb} is not a known Fuchsia product bundle.')
+
+  if '*' in args.product_bundles:
+    raise ValueError('Wildcards are no longer supported, all product bundles '
+                     'need to be explicitly listed. The full list can be '
+                     'found in the DEPS file.')
+
+  with ExitStack() as stack:
+
+    # Re-set the directory to which product bundles are downloaded so that
+    # these bundles are located inside the Chromium codebase.
+    common.run_ffx_command(
+        ('config', 'set', 'pbms.storage.path', common.IMAGES_ROOT))
+
+    logging.debug('Checking for override file')
+
+    # TODO(crbug/1380807): Remove when product bundles can be downloaded
+    # for custom SDKs without editing metadata
+    override_file = os.path.join(os.path.dirname(__file__), 'sdk_override.txt')
+    pb_metadata = None
+    if os.path.isfile(override_file):
+      with open(override_file) as f:
+        pb_metadata = f.read().strip().split('\n')
+      pb_metadata.append('{sdk.root}/*.json')
+      logging.debug('Applied overrides')
+
+    logging.debug('Getting new SDK hash')
+    new_sdk_hash = get_hash_from_sdk()
+    keep_product_bundles_by_sdk_version(new_sdk_hash)
+    logging.debug('Checking for current signature')
+    curr_signature = get_current_signature()
+
+    current_images = get_product_bundles()
+
+    # If SDK versions match, remove the product bundles that are no longer
+    # needed and download missing ones.
+    if curr_signature == new_sdk_hash:
+      logging.debug('Current images: %s, desired images %s',
+                    str(current_images), str(new_product_bundles))
+      for image in current_images:
+        if image not in new_product_bundles:
+          logging.debug('Removing no longer needed Fuchsia image %s', image)
+          remove_product_bundle(image)
+
+      bundles_to_download = set(new_product_bundles) - \
+                            set(current_images)
+      for bundle in bundles_to_download:
+        logging.debug('Downloading bundle: %s', bundle)
+        download_product_bundle(bundle, None)
+
+      return 0
+
+    # If SDK versions do not match, remove all existing product bundles
+    # and download the ones required.
+ for pb in current_images: + remove_product_bundle(pb) + + logging.debug('Make clean images root') + common.make_clean_directory(common.IMAGES_ROOT) + + download_config = None + if pb_metadata: + download_config = [ + '{"pbms":{"metadata": %s}}' % json.dumps((pb_metadata)) + ] + for pb in new_product_bundles: + logging.debug('Downloading bundle: %s', pb) + download_product_bundle(pb, download_config) + + current_pb = get_product_bundles() + + assert set(current_pb) == set(new_product_bundles), ( + 'Failed to download expected set of product-bundles. ' + f'Expected {new_product_bundles}, got {current_pb}') + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/fuchsia/update_product_bundles_test.py b/build/fuchsia/update_product_bundles_test.py new file mode 100755 index 000000000000..0ffc20cce916 --- /dev/null +++ b/build/fuchsia/update_product_bundles_test.py @@ -0,0 +1,288 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import io +import json +import os +import sys +import unittest +from unittest import mock + +from parameterized import parameterized + +import update_product_bundles + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + 'test'))) + +import common + + +class TestUpdateProductBundles(unittest.TestCase): + def setUp(self): + ffx_mock = mock.Mock() + ffx_mock.returncode = 0 + self._ffx_patcher = mock.patch('common.run_ffx_command', + return_value=ffx_mock) + self._ffx_mock = self._ffx_patcher.start() + self.addCleanup(self._ffx_mock.stop) + + def testConvertToProductBundleDefaultsUnknownImage(self): + self.assertEqual( + update_product_bundles.convert_to_product_bundle(['unknown-image']), + ['unknown-image']) + + def testConvertToProductBundleWarnsDeprecated(self): + with self.assertLogs(level='WARNING') as logs: + deprecated_images = [ + 'qemu.arm64', 'qemu.x64', 'core.x64-dfv2-release', + 'workstation_eng.chromebook-x64-release' + ] + self.assertEqual( + update_product_bundles.convert_to_product_bundle(deprecated_images), [ + 'terminal.qemu-arm64', 'terminal.qemu-x64', 'core.x64-dfv2', + 'workstation_eng.chromebook-x64' + ]) + for i, deprecated_image in enumerate(deprecated_images): + self.assertIn(f'Image name {deprecated_image} has been deprecated', + logs.output[i]) + + @mock.patch('builtins.open') + @mock.patch('os.path.exists') + def testGetHashFromSDK(self, mock_exists, mock_open): + mock_open.return_value = io.StringIO(json.dumps({'id': 'foo-bar'})) + mock_exists.return_value = True + + self.assertEqual(update_product_bundles.get_hash_from_sdk(), 'foo-bar') + + manifest_file = os.path.join(common.SDK_ROOT, 'meta', 'manifest.json') + mock_exists.assert_called_once_with(manifest_file) + mock_open.assert_called_once_with(manifest_file, 'r') + + @mock.patch('builtins.open') + @mock.patch('os.path.exists') + def testGetHashFromSDKRaisesErrorIfNoManifestExists(self, mock_exists, + mock_open): + mock_exists.return_value = False + + self.assertRaises(RuntimeError, update_product_bundles.get_hash_from_sdk) + + @mock.patch('common.run_ffx_command') + def testRemoveRepositoriesRunsRemoveOnGivenRepos(self, ffx_mock): + update_product_bundles.remove_repositories(['foo', 'bar', 'fizz', 'buzz']) + + ffx_mock.assert_has_calls([ + mock.call(('repository', 'remove', 'foo'), check=True), + mock.call(('repository', 'remove', 'bar'), check=True), + mock.call(('repository', 'remove', 'fizz'), check=True), + 
mock.call(('repository', 'remove', 'buzz'), check=True), + ]) + + @mock.patch('os.path.exists') + @mock.patch('os.path.abspath') + def testGetRepositoriesPrunesReposThatDoNotExist(self, mock_abspath, + mock_exists): + with mock.patch('common.SDK_ROOT', 'some/path'): + self._ffx_mock.return_value.stdout = json.dumps([{ + "name": "terminal.qemu-x64", + "spec": { + "type": "pm", + "path": "some/path/that/exists" + } + }, { + "name": "workstation-eng.chromebook-x64", + "spec": { + "type": "pm", + "path": "some/path/that/does/not/exist" + } + }]) + mock_exists.side_effect = [True, False] + mock_abspath.side_effect = lambda x: x + + self.assertEqual(update_product_bundles.get_repositories(), [{ + "name": "terminal.qemu-x64", + "spec": { + "type": "pm", + "path": "some/path/that/exists" + } + }]) + + self._ffx_mock.assert_has_calls([ + mock.call(('--machine', 'json', 'repository', 'list'), + capture_output=True, + check=True), + mock.call(('repository', 'remove', 'workstation-eng.chromebook-x64'), + check=True) + ]) + + def testRemoveProductBundle(self): + update_product_bundles.remove_product_bundle('some-bundle-foo-bar') + + self._ffx_mock.assert_called_once_with( + ('product-bundle', 'remove', '-f', 'some-bundle-foo-bar')) + + def _InitFFXRunWithProductBundleList(self, sdk_version='10.20221114.2.1'): + self._ffx_mock.return_value.stdout = f""" + gs://fuchsia/{sdk_version}/bundles.json#workstation_eng.qemu-x64 + gs://fuchsia/{sdk_version}/bundles.json#workstation_eng.chromebook-x64-dfv2 +* gs://fuchsia/{sdk_version}/bundles.json#workstation_eng.chromebook-x64 +* gs://fuchsia/{sdk_version}/bundles.json#terminal.qemu-x64 + gs://fuchsia/{sdk_version}/bundles.json#terminal.qemu-arm64 +* gs://fuchsia/{sdk_version}/bundles.json#core.x64-dfv2 + +*No need to fetch with `ffx product-bundle get ...`. + """ + + def testGetProductBundleUrlsMarksDesiredAsDownloaded(self): + self._InitFFXRunWithProductBundleList() + urls = update_product_bundles.get_product_bundle_urls() + expected_urls = [{ + 'url': + 'gs://fuchsia/10.20221114.2.1/bundles.json#workstation_eng.qemu-x64', + 'downloaded': False, + }, { + 'url': ('gs://fuchsia/10.20221114.2.1/bundles.json#workstation_eng.' + 'chromebook-x64-dfv2'), + 'downloaded': + False, + }, { + 'url': ('gs://fuchsia/10.20221114.2.1/bundles.json#workstation_eng.' 
+ 'chromebook-x64'), + 'downloaded': + True, + }, { + 'url': 'gs://fuchsia/10.20221114.2.1/bundles.json#terminal.qemu-x64', + 'downloaded': True, + }, { + 'url': 'gs://fuchsia/10.20221114.2.1/bundles.json#terminal.qemu-arm64', + 'downloaded': False, + }, { + 'url': 'gs://fuchsia/10.20221114.2.1/bundles.json#core.x64-dfv2', + 'downloaded': True, + }] + + for i, url in enumerate(urls): + self.assertEqual(url, expected_urls[i]) + + @mock.patch('update_product_bundles.get_repositories') + def testGetProductBundlesExtractsProductBundlesFromURLs(self, mock_get_repos): + self._InitFFXRunWithProductBundleList() + mock_get_repos.return_value = [{ + 'name': 'workstation-eng.chromebook-x64' + }, { + 'name': 'terminal.qemu-x64' + }, { + 'name': 'core.x64-dfv2' + }] + + self.assertEqual( + set(update_product_bundles.get_product_bundles()), + set([ + 'workstation_eng.chromebook-x64', + 'terminal.qemu-x64', + 'core.x64-dfv2', + ])) + + @mock.patch('update_product_bundles.get_repositories') + def testGetProductBundlesExtractsProductBundlesFromURLsFiltersMissingRepos( + self, mock_get_repos): + self._InitFFXRunWithProductBundleList() + + # This will be missing two repos from the bundle list: + # core and terminal.qemu-x64 + # Additionally, workstation-eng != workstation_eng, but they will be treated + # as the same product-bundle + mock_get_repos.return_value = [{ + 'name': 'workstation-eng.chromebook-x64' + }, { + 'name': 'terminal.qemu-arm64' + }] + + self.assertEqual(update_product_bundles.get_product_bundles(), + ['workstation_eng.chromebook-x64']) + self._ffx_mock.assert_has_calls([ + mock.call(('product-bundle', 'remove', '-f', 'terminal.qemu-x64')), + mock.call(('product-bundle', 'remove', '-f', 'core.x64-dfv2')), + ], + any_order=True) + + @mock.patch('common.run_ffx_command') + @mock.patch('update_product_bundles.update_repositories_list') + def testDownloadProductBundleUpdatesRepoListBeforeCall( + self, mock_update_repo, mock_ffx): + mock_sequence = mock.Mock() + mock_sequence.attach_mock(mock_update_repo, 'update_repo_list') + mock_sequence.attach_mock(mock_ffx, 'run_ffx_command') + + update_product_bundles.download_product_bundle('some-bundle', None) + + mock_sequence.assert_has_calls([ + mock.call.update_repo_list(), + mock.call.run_ffx_command( + ('product-bundle', 'get', 'some-bundle', '--force-repo'), + configs=None) + ]) + + @mock.patch('common.run_ffx_command') + @mock.patch('update_product_bundles.get_product_bundle_urls') + def testFilterProductBundleURLsRemovesBundlesWithoutGivenString( + self, mock_get_urls, mock_ffx): + mock_get_urls.return_value = [ + { + 'url': 'some-url-has-buzz', + 'downloaded': True, + }, + { + 'url': 'some-url-to-remove-has-foo', + 'downloaded': True, + }, + { + 'url': 'some-url-to-not-remove-has-foo', + 'downloaded': False, + }, + ] + update_product_bundles.keep_product_bundles_by_sdk_version('buzz') + mock_ffx.assert_called_once_with( + ('product-bundle', 'remove', '-f', 'some-url-to-remove-has-foo')) + + @mock.patch('update_product_bundles.get_repositories') + def testGetCurrentSignatureReturnsNoneIfNoProductBundles( + self, mock_get_repos): + self._InitFFXRunWithProductBundleList() + + # Forces no product-bundles + mock_get_repos.return_value = [] + + # Mutes logs + with self.assertLogs(): + self.assertIsNone(update_product_bundles.get_current_signature()) + + @mock.patch('update_product_bundles.get_repositories') + def testGetCurrentSignatureParsesVersionCorrectly(self, mock_get_repos): + self._InitFFXRunWithProductBundleList() + 
mock_get_repos.return_value = [{
+        'name': 'workstation-eng.chromebook-x64'
+    }, {
+        'name': 'terminal.qemu-x64'
+    }]
+
+    self.assertEqual('10.20221114.2.1',
+                     update_product_bundles.get_current_signature())
+
+  @mock.patch('update_product_bundles.get_repositories')
+  def testGetCurrentSignatureParsesCustomArtifactsCorrectly(
+      self, mock_get_repos):
+    self._InitFFXRunWithProductBundleList(sdk_version='51390009')
+    mock_get_repos.return_value = [{
+        'name': 'workstation-eng.chromebook-x64'
+    }, {
+        'name': 'terminal.qemu-x64'
+    }]
+
+    self.assertEqual('51390009', update_product_bundles.get_current_signature())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/fuchsia/update_sdk.py b/build/fuchsia/update_sdk.py
index a1c9621facc1..2b30a9c3c0a9 100755
--- a/build/fuchsia/update_sdk.py
+++ b/build/fuchsia/update_sdk.py
@@ -1,166 +1,107 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2022 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
-"""Updates the Fuchsia SDK to the given revision. Should be used in a 'hooks_os'
-entry so that it only runs when .gclient's target_os includes 'fuchsia'."""
+"""Check out the Fuchsia SDK from a given GCS path. Should be used in a
+'hooks_os' entry so that it only runs when .gclient's custom_vars includes
+'fuchsia'."""
 
 import argparse
 import logging
 import os
-import re
-import shutil
+import platform
 import subprocess
 import sys
-import tarfile
-
-from common import GetHostOsFromPlatform, GetHostArchFromPlatform, \
-     DIR_SOURCE_ROOT, SDK_ROOT
-
-sys.path.append(os.path.join(DIR_SOURCE_ROOT, 'build'))
-import find_depot_tools
+from typing import Optional
 
-SDK_SIGNATURE_FILE = '.hash'
-SDK_TARBALL_PATH_TEMPLATE = (
-    'gs://{bucket}/development/{sdk_hash}/sdk/{platform}-amd64/gn.tar.gz')
+from gcs_download import DownloadAndUnpackFromCloudStorage
 
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                             'test')))
 
-def ReadFile(filename):
-  with open(os.path.join(os.path.dirname(__file__), filename), 'r') as f:
-    return f.read()
+from common import SDK_ROOT, get_host_arch, get_host_os, make_clean_directory
 
-# TODO(crbug.com/1138433): Investigate whether we can deprecate
-# use of sdk_bucket.txt.
-def GetOverrideCloudStorageBucket():
-  """Read bucket entry from sdk_bucket.txt"""
-  return ReadFile('sdk-bucket.txt').strip()
 
+def _GetHostArch():
+  host_arch = platform.machine()
+  # platform.machine() returns AMD64 on 64-bit Windows.
+  if host_arch in ['x86_64', 'AMD64']:
+    return 'amd64'
+  elif host_arch == 'aarch64':
+    return 'arm64'
+  raise Exception('Unsupported host architecture: %s' % host_arch)
 
-def GetSdkHash(bucket):
-  hashes = GetSdkHashList()
-  return (max(hashes, key=lambda sdk: GetSdkGeneration(bucket, sdk))
-          if hashes else None)
 
+def GetSDKOverrideGCSPath(path: Optional[str] = None) -> Optional[str]:
+  """Fetches the SDK override path from a file.
+
+  Args:
+    path: the full file path to read the data from.
+      Defaults to sdk_override.txt in the directory of this file.
-def GetSdkHashList(): - """Read filename entries from sdk-hash-files.list (one per line), substitute - {platform} in each entry if present, and read from each filename.""" - platform = GetHostOsFromPlatform() - filenames = [ - line.strip() for line in ReadFile('sdk-hash-files.list').replace( - '{platform}', platform).splitlines() - ] - sdk_hashes = [ReadFile(filename).strip() for filename in filenames] - return sdk_hashes + Returns: + The contents of the file, stripped of white space. + Example: gs://fuchsia-artifacts/development/some-id/sdk + """ + if not path: + path = os.path.join(os.path.dirname(__file__), 'sdk_override.txt') - -def GetSdkGeneration(bucket, hash): - if not hash: + if not os.path.isfile(path): return None - sdk_path = GetSdkTarballPath(bucket, hash) - cmd = [ - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'), 'ls', '-L', - sdk_path - ] - logging.debug("Running '%s'", " ".join(cmd)) - sdk_details = subprocess.check_output(cmd).decode('utf-8') - m = re.search('Generation:\s*(\d*)', sdk_details) - if not m: - raise RuntimeError('Could not find SDK generation for {sdk_path}'.format( - sdk_path=sdk_path)) - return int(m.group(1)) - - -def GetSdkTarballPath(bucket, sdk_hash): - return SDK_TARBALL_PATH_TEMPLATE.format( - bucket=bucket, sdk_hash=sdk_hash, platform=GetHostOsFromPlatform()) - - -# Updates the modification timestamps of |path| and its contents to the -# current time. -def UpdateTimestampsRecursive(): - for root, dirs, files in os.walk(SDK_ROOT): - for f in files: - os.utime(os.path.join(root, f), None) - for d in dirs: - os.utime(os.path.join(root, d), None) - - -# Fetches a tarball from GCS and uncompresses it to |output_dir|. -def DownloadAndUnpackFromCloudStorage(url, output_dir): - # Pass the compressed stream directly to 'tarfile'; don't bother writing it - # to disk first. - cmd = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'), - 'cp', url, '-'] - logging.debug('Running "%s"', ' '.join(cmd)) - task = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE) - try: - tarfile.open(mode='r|gz', fileobj=task.stdout).extractall(path=output_dir) - except tarfile.ReadError: - task.wait() - stderr = task.stderr.read() - raise subprocess.CalledProcessError(task.returncode, cmd, - "Failed to read a tarfile from gsutil.py.{}".format( - stderr if stderr else "")) - task.wait() - if task.returncode: - raise subprocess.CalledProcessError(task.returncode, cmd, - task.stderr.read()) + with open(path, 'r') as f: + return f.read().strip() -def MakeCleanDirectory(directory_name): - if (os.path.exists(directory_name)): - shutil.rmtree(directory_name) - os.mkdir(directory_name) +def _GetTarballPath(gcs_tarball_prefix: str) -> str: + """Get the full path to the sdk tarball on GCS""" + platform = get_host_os() + arch = _GetHostArch() + return f'{gcs_tarball_prefix}/{platform}-{arch}/gn.tar.gz' def main(): parser = argparse.ArgumentParser() - parser.add_argument('--verbose', '-v', - action='store_true', - help='Enable debug-level logging.') - parser.add_argument( - '--default-bucket', - type=str, - default='fuchsia', - help='The Google Cloud Storage bucket in which the Fuchsia SDK is ' - 'stored. 
Entry in sdk-bucket.txt will override this flag.') + parser.add_argument('--cipd-prefix', help='CIPD base directory for the SDK.') + parser.add_argument('--version', help='Specifies the SDK version.') + parser.add_argument('--verbose', + '-v', + action='store_true', + help='Enable debug-level logging.') args = parser.parse_args() logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) - # Quietly exit if there's no SDK support for this platform. + # Exit if there's no SDK support for this platform. try: - GetHostOsFromPlatform() + host_plat = get_host_os() except: + logging.warning('Fuchsia SDK is not supported on this platform.') return 0 - # Use the bucket in sdk-bucket.txt if an entry exists. - # Otherwise use the default bucket. - bucket = GetOverrideCloudStorageBucket() or args.default_bucket - - sdk_hash = GetSdkHash(bucket) - if not sdk_hash: - return 1 - - signature_filename = os.path.join(SDK_ROOT, SDK_SIGNATURE_FILE) - current_signature = (open(signature_filename, 'r').read().strip() - if os.path.exists(signature_filename) else '') - if current_signature != sdk_hash: - logging.info('Downloading GN SDK %s...' % sdk_hash) - - MakeCleanDirectory(SDK_ROOT) - DownloadAndUnpackFromCloudStorage(GetSdkTarballPath(bucket, sdk_hash), - SDK_ROOT) - - with open(signature_filename, 'w') as f: - f.write(sdk_hash) - - UpdateTimestampsRecursive() + gcs_tarball_prefix = GetSDKOverrideGCSPath() + + # Download from CIPD if there is no override file. + if not gcs_tarball_prefix: + if not args.cipd_prefix: + parser.exit(1, '--cipd-prefix must be specified.') + if not args.version: + parser.exit(2, '--version must be specified.') + logging.info('Downloading GN SDK from CIPD...') + ensure_file = '%s%s-%s %s' % (args.cipd_prefix, host_plat, _GetHostArch(), + args.version) + subprocess.run(('cipd', 'ensure', '-ensure-file', '-', '-root', SDK_ROOT, + '-log-level', 'warning'), + check=True, + text=True, + input=ensure_file) + return 0 + # Always re-download the SDK. + logging.info('Downloading GN SDK from GCS...') + make_clean_directory(SDK_ROOT) + DownloadAndUnpackFromCloudStorage(_GetTarballPath(gcs_tarball_prefix), + SDK_ROOT) return 0 diff --git a/build/fuchsia/update_sdk_test.py b/build/fuchsia/update_sdk_test.py new file mode 100755 index 000000000000..5def6796d449 --- /dev/null +++ b/build/fuchsia/update_sdk_test.py @@ -0,0 +1,69 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
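For reference, the stdin payload that update_sdk.py above pipes to
`cipd ensure -ensure-file -` is a single "<package> <version>" line built from
the flags; a sketch with hypothetical --cipd-prefix and --version values:

cipd_prefix = 'fuchsia/sdk/core/'    # hypothetical --cipd-prefix value
version = 'version:12.20230601.1.1'  # hypothetical --version value
ensure_file = '%s%s-%s %s' % (cipd_prefix, 'linux', 'amd64', version)
print(ensure_file)  # fuchsia/sdk/core/linux-amd64 version:12.20230601.1.1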
+ +import unittest +from unittest import mock + +from parameterized import parameterized + +from update_sdk import _GetHostArch +from update_sdk import _GetTarballPath +from update_sdk import GetSDKOverrideGCSPath + + +@mock.patch('platform.machine') +class TestGetHostArch(unittest.TestCase): + @parameterized.expand([('x86_64', 'amd64'), ('AMD64', 'amd64'), + ('aarch64', 'arm64')]) + def testSupportedArchs(self, mock_machine, arch, expected): + mock_machine.return_value = arch + self.assertEqual(_GetHostArch(), expected) + + def testUnsupportedArch(self, mock_machine): + mock_machine.return_value = 'bad_arch' + with self.assertRaises(Exception): + _GetHostArch() + + +@mock.patch('builtins.open') +@mock.patch('os.path.isfile') +class TestGetSDKOverrideGCSPath(unittest.TestCase): + def testFileNotFound(self, mock_isfile, mock_open): + mock_isfile.return_value = False + + actual = GetSDKOverrideGCSPath('this-file-does-not-exist.txt') + self.assertIsNone(actual) + + def testDefaultPath(self, mock_isfile, mock_open): + mock_isfile.return_value = False + + with mock.patch('os.path.dirname', return_value='./'): + GetSDKOverrideGCSPath() + + mock_isfile.assert_called_with('./sdk_override.txt') + + def testRead(self, mock_isfile, mock_open): + fake_path = '\n\ngs://fuchsia-artifacts/development/abc123/sdk\n\n' + + mock_isfile.return_value = True + mock_open.side_effect = mock.mock_open(read_data=fake_path) + + actual = GetSDKOverrideGCSPath() + self.assertEqual(actual, 'gs://fuchsia-artifacts/development/abc123/sdk') + + +@mock.patch('update_sdk._GetHostArch') +@mock.patch('update_sdk.get_host_os') +class TestGetTarballPath(unittest.TestCase): + def testGetTarballPath(self, mock_get_host_os, mock_host_arch): + mock_get_host_os.return_value = 'linux' + mock_host_arch.return_value = 'amd64' + + actual = _GetTarballPath('gs://bucket/sdk') + self.assertEqual(actual, 'gs://bucket/sdk/linux-amd64/gn.tar.gz') + + +if __name__ == '__main__': + unittest.main() diff --git a/build/gdb-add-index b/build/gdb-add-index index 73367c835034..e756ceacc62a 100755 --- a/build/gdb-add-index +++ b/build/gdb-add-index @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # diff --git a/build/get_landmines.py b/build/get_landmines.py index a32ab9937c7a..6155d71deca1 100755 --- a/build/get_landmines.py +++ b/build/get_landmines.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2013 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,7 +8,6 @@ (or a list of 'landmines'). """ -from __future__ import print_function import sys @@ -78,6 +77,9 @@ def print_landmines(): print('The Great Blink mv for source files (crbug.com/768828)') if host_os() == 'linux': print('Clobber to workaround buggy .ninja_deps cycle (crbug.com/934404)') + print('Clobber to flush stale generated files. See crbug.com/1406628') + print('Clobber to flush old .ninja_log files for updating ninja. ' + 'See crbug.com/1406628#c14') def main(): diff --git a/build/get_symlink_targets.py b/build/get_symlink_targets.py index 3285ff1d9303..850bbae70fae 100755 --- a/build/get_symlink_targets.py +++ b/build/get_symlink_targets.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright (c) 2019 The Chromium Authors. 
All rights reserved. +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Prints the target paths of the given symlinks. diff --git a/build/gn_helpers.py b/build/gn_helpers.py index c6e4129b2116..34a282eec97b 100644 --- a/build/gn_helpers.py +++ b/build/gn_helpers.py @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/gn_helpers_unittest.py b/build/gn_helpers_unittest.py old mode 100644 new mode 100755 index f4b756cd1760..bb0f31fdf281 --- a/build/gn_helpers_unittest.py +++ b/build/gn_helpers_unittest.py @@ -1,4 +1,5 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/gn_logs.gni b/build/gn_logs.gni index 79a92b9d4ffa..495f38658321 100644 --- a/build/gn_logs.gni +++ b/build/gn_logs.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/gn_run_binary.py b/build/gn_run_binary.py index d7e2926fa4fb..414f6952fe3c 100644 --- a/build/gn_run_binary.py +++ b/build/gn_run_binary.py @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,7 +8,6 @@ python gn_run_binary.py [args ...] """ -from __future__ import print_function import os import subprocess diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh deleted file mode 100755 index 882e7be903c5..000000000000 --- a/build/install-build-deps-android.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Script to install everything needed to build chromium on android, including -# items requiring sudo privileges. -# See https://www.chromium.org/developers/how-tos/android-build-instructions - -args="$@" - -if ! uname -m | egrep -q "i686|x86_64"; then - echo "Only x86 architectures are currently supported" >&2 - exit -fi - -# Exit if any commands fail. -set -e - -lsb_release=$(lsb_release --codename --short) - -# Install first the default Linux build deps. -"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \ - --no-syms --lib32 --no-arm --no-chromeos-fonts --no-nacl --no-prompt "${args}" - -# Fix deps -sudo apt-get -f install - -# common -sudo apt-get -y install lib32z1 lighttpd xvfb x11-utils - -# Some binaries in the Android SDK require 32-bit libraries on the host. -# See https://developer.android.com/sdk/installing/index.html?pkg=tools -sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386 - -# Required for apk-patch-size-estimator -sudo apt-get -y install bsdiff - -echo "install-build-deps-android.sh complete." 
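The build_apt_package_list() helper in the install-build-deps.sh diff below
drives its parsing through an embedded python3 -c one-liner. Unrolled into a
named Python function (the sample stanza is illustrative), the same logic
reads:

import re

def parse_available_packages(dumpavail_text):
  # One "Package:"/"Architecture:" stanza per available package, as printed
  # by `apt-cache dumpavail`; i386 packages get an explicit ":i386" suffix.
  suffix = {'i386': ':i386'}
  stanza = re.compile(r'^Package: (.+?)$.+?^Architecture: (.+?)$', re.M | re.S)
  return ['%s%s' % (name, suffix.get(arch, ''))
          for name, arch in stanza.findall(dumpavail_text)]

sample = 'Package: zlib1g\nArchitecture: i386\nDescription: compression\n'
assert parse_available_packages(sample) == ['zlib1g:i386']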
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh index 13076f3f5b06..de4081dc1678 100755 --- a/build/install-build-deps.sh +++ b/build/install-build-deps.sh @@ -1,17 +1,20 @@ #!/bin/bash -e -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # Script to install everything needed to build chromium (well, ideally, anyway) +# including items requiring sudo privileges. # See https://chromium.googlesource.com/chromium/src/+/main/docs/linux/build_instructions.md +# and https://chromium.googlesource.com/chromium/src/+/HEAD/docs/android_build_instructions.md usage() { echo "Usage: $0 [--options]" echo "Options:" echo "--[no-]syms: enable or disable installation of debugging symbols" echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot" + echo "--[no-]android: enable or disable installation of android dependencies" echo "--[no-]arm: enable or disable installation of arm cross toolchain" echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\ "fonts" @@ -33,14 +36,13 @@ usage() { build_apt_package_list() { echo "Building apt package list." >&2 apt-cache dumpavail | \ - python3 -c '\ - import re,sys; \ - o = sys.stdin.read(); \ - p = {"i386": ":i386"}; \ - f = re.M | re.S; \ - r = re.compile(r"^Package: (.+?)$.+?^Architecture: (.+?)$", f); \ - m = ["%s%s" % (x, p.get(y, "")) for x, y in re.findall(r, o)]; \ - print("\n".join(m))' + python3 -c 'import re,sys; \ +o = sys.stdin.read(); \ +p = {"i386": ":i386"}; \ +f = re.M | re.S; \ +r = re.compile(r"^Package: (.+?)$.+?^Architecture: (.+?)$", f); \ +m = ["%s%s" % (x, p.get(y, "")) for x, y in re.findall(r, o)]; \ +print("\n".join(m))' } # Checks whether a particular package is available in the repos. @@ -57,12 +59,9 @@ package_exists() { [ ! -z "$(grep "^${escaped}$" <<< "${apt_package_list}")" ] } -# These default to on because (some) bots need them and it keeps things -# simple for the bot setup if all bots just run the script in its default -# mode. Developers who don't want stuff they don't need installed on their -# own workstations can pass --no-arm --no-nacl when running the script. -do_inst_arm=1 -do_inst_nacl=1 +do_inst_arm=0 +do_inst_nacl=0 +do_inst_android=0 while [ "$1" != "" ] do @@ -70,6 +69,8 @@ do --syms) do_inst_syms=1;; --no-syms) do_inst_syms=0;; --lib32) do_inst_lib32=1;; + --android) do_inst_android=1;; + --no-android) do_inst_android=0;; --arm) do_inst_arm=1;; --no-arm) do_inst_arm=0;; --chromeos-fonts) do_inst_chromeos_fonts=1;; @@ -93,6 +94,10 @@ if [ "$do_inst_arm" = "1" ]; then do_inst_lib32=1 fi +if [ "$do_inst_android" = "1" ]; then + do_inst_lib32=1 +fi + # Check for lsb_release command in $PATH if ! which lsb_release > /dev/null; then echo "ERROR: lsb_release not found in \$PATH" >&2 @@ -102,21 +107,22 @@ fi distro_codename=$(lsb_release --codename --short) distro_id=$(lsb_release --id --short) -supported_codenames="(trusty|xenial|bionic|disco|eoan|focal|groovy)" +supported_codenames="(bionic|focal|jammy)" supported_ids="(Debian)" if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then if [[ ! $distro_codename =~ $supported_codenames && ! 
$distro_id =~ $supported_ids ]]; then
-    echo -e "ERROR: The only supported distros are\n" \
-      "\tUbuntu 14.04 LTS (trusty with EoL April 2022)\n" \
-      "\tUbuntu 16.04 LTS (xenial with EoL April 2024)\n" \
+    echo -e "WARNING: Only the following distributions are supported;
+    other distributions may still work if you re-run this script with
+    the \`--unsupported\` parameter:\n" \
       "\tUbuntu 18.04 LTS (bionic with EoL April 2028)\n" \
-      "\tUbuntu 20.04 LTS (focal with Eol April 2030)\n" \
-      "\tUbuntu 20.10 (groovy)\n" \
-      "\tDebian 8 (jessie) or later" >&2
+      "\tUbuntu 20.04 LTS (focal with EoL April 2030)\n" \
+      "\tUbuntu 22.04 LTS (jammy with EoL April 2032)\n" \
+      "\tDebian 10 (buster) or later" >&2
     exit 1
   fi
 
+# Check system architecture
 if ! uname -m | egrep -q "i686|x86_64"; then
   echo "Only x86 architectures are currently supported" >&2
   exit
@@ -140,7 +146,7 @@ fi
 apt_package_list=$(build_apt_package_list)
 
 # Packages needed for chromeos only
-chromeos_dev_list="libbluetooth-dev libxkbcommon-dev mesa-common-dev"
+chromeos_dev_list="libbluetooth-dev libxkbcommon-dev mesa-common-dev zstd"
 
 if package_exists realpath; then
   chromeos_dev_list="${chromeos_dev_list} realpath"
@@ -161,7 +167,6 @@ dev_list="\
   flex
   git-core
   gperf
-  libappindicator3-dev
   libasound2-dev
   libatspi2.0-dev
   libbrlapi-dev
@@ -189,6 +194,7 @@ dev_list="\
   libspeechd-dev
   libsqlite3-dev
   libssl-dev
+  libsystemd-dev
   libudev-dev
   libva-dev
   libwww-perl
@@ -197,13 +203,13 @@ dev_list="\
   libxss-dev
   libxt-dev
   libxtst-dev
+  lighttpd
   locales
   openbox
   p7zip
   patch
   perl
   pkg-config
-  python-setuptools
   rpm
   ruby
   subversion
@@ -216,12 +222,6 @@ dev_list="\
   $chromeos_dev_list
 "
 
-if package_exists python-is-python2; then
-  dev_list="${dev_list} python-is-python2 python2-dev"
-else
-  dev_list="${dev_list} python python-dev"
-fi
-
 # 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
 # NaCl binaries.
if file -L /sbin/init | grep -q 'ELF 64-bit'; then @@ -240,24 +240,29 @@ chromeos_lib_list="libpulse0 libbz2-1.0" # List of required run-time libraries common_lib_list="\ - libappindicator3-1 + lib32z1 libasound2 libatk1.0-0 libatspi2.0-0 libc6 libcairo2 libcap2 + libcgi-session-perl libcups2 libdrm2 + libegl1 libevdev2 libexpat1 libfontconfig1 libfreetype6 libgbm1 libglib2.0-0 + libgl1 libgtk-3-0 + libncurses5 libpam0g libpango-1.0-0 + libpangocairo-1.0-0 libpci3 libpcre3 libpixman-1-0 @@ -265,6 +270,7 @@ common_lib_list="\ libstdc++6 libsqlite3-0 libuuid1 + libwayland-egl1 libwayland-egl1-mesa libx11-6 libx11-xcb1 @@ -281,10 +287,14 @@ common_lib_list="\ libxrandr2 libxrender1 libxtst6 + x11-utils + xvfb zlib1g " -if package_exists libffi7; then +if package_exists libffi8; then + common_lib_list="${common_lib_list} libffi8" +elif package_exists libffi7; then common_lib_list="${common_lib_list} libffi7" elif package_exists libffi6; then common_lib_list="${common_lib_list} libffi6" @@ -300,7 +310,31 @@ lib_list="\ lib32_list="linux-libc-dev:i386 libpci3:i386" # 32-bit libraries needed for a 32-bit build -lib32_list="$lib32_list libx11-xcb1:i386" +# includes some 32-bit libraries required by the Android SDK +# See https://developer.android.com/sdk/installing/index.html?pkg=tools +lib32_list="$lib32_list + libasound2:i386 + libatk-bridge2.0-0:i386 + libatk1.0-0:i386 + libatspi2.0-0:i386 + libdbus-1-3:i386 + libegl1:i386 + libgl1:i386 + libglib2.0-0:i386 + libncurses5:i386 + libnss3:i386 + libpango-1.0-0:i386 + libpangocairo-1.0-0:i386 + libstdc++6:i386 + libwayland-egl1:i386 + libx11-xcb1:i386 + libxcomposite1:i386 + libxdamage1:i386 + libxkbcommon0:i386 + libxrandr2:i386 + libxtst6:i386 + zlib1g:i386 +" # Packages that have been removed from this script. 
Regardless of configuration # or options passed to this script, whenever a package is removed, it should be @@ -313,6 +347,15 @@ backwards_compatible_list="\ fonts-thai-tlwg fonts-tlwg-garuda g++ + g++-4.8-multilib-arm-linux-gnueabihf + gcc-4.8-multilib-arm-linux-gnueabihf + g++-9-multilib-arm-linux-gnueabihf + gcc-9-multilib-arm-linux-gnueabihf + gcc-arm-linux-gnueabihf + g++-10-multilib-arm-linux-gnueabihf + gcc-10-multilib-arm-linux-gnueabihf + g++-10-arm-linux-gnueabihf + gcc-10-arm-linux-gnueabihf git-svn language-pack-da language-pack-fr @@ -320,6 +363,8 @@ backwards_compatible_list="\ language-pack-zh-hant libappindicator-dev libappindicator1 + libappindicator3-1 + libappindicator3-dev libdconf-dev libdconf1 libdconf1:i386 @@ -327,17 +372,29 @@ backwards_compatible_list="\ libexif12 libexif12:i386 libgbm-dev + libgbm-dev-lts-trusty + libgbm-dev-lts-xenial libgconf-2-4:i386 libgconf2-dev libgl1-mesa-dev + libgl1-mesa-dev-lts-trusty + libgl1-mesa-dev-lts-xenial libgl1-mesa-glx:i386 + libgl1-mesa-glx-lts-trusty:i386 + libgl1-mesa-glx-lts-xenial:i386 libgles2-mesa-dev + libgles2-mesa-dev-lts-trusty + libgles2-mesa-dev-lts-xenial libgtk-3-0:i386 libgtk2.0-0 libgtk2.0-0:i386 libgtk2.0-dev mesa-common-dev + mesa-common-dev-lts-trusty + mesa-common-dev-lts-xenial msttcorefonts + python-dev + python-setuptools ttf-dejavu-core ttf-indic-fonts ttf-kochi-gothic @@ -345,100 +402,78 @@ backwards_compatible_list="\ ttf-mscorefonts-installer xfonts-mathml " + if package_exists python-is-python2; then - backwards_compatible_list="${backwards_compatible_list} python-dev" + backwards_compatible_list="${backwards_compatible_list} python-is-python2 python2-dev" +else + backwards_compatible_list="${backwards_compatible_list} python" fi -case $distro_codename in - trusty) - backwards_compatible_list+=" \ - libgbm-dev-lts-trusty - libgl1-mesa-dev-lts-trusty - libgl1-mesa-glx-lts-trusty:i386 - libgles2-mesa-dev-lts-trusty - mesa-common-dev-lts-trusty" - ;; - xenial) - backwards_compatible_list+=" \ - libgbm-dev-lts-xenial - libgl1-mesa-dev-lts-xenial - libgl1-mesa-glx-lts-xenial:i386 - libgles2-mesa-dev-lts-xenial - mesa-common-dev-lts-xenial" - ;; -esac +if package_exists python-crypto; then + backwards_compatible_list="${backwards_compatible_list} python-crypto" +fi + +if package_exists python-numpy; then + backwards_compatible_list="${backwards_compatible_list} python-numpy" +fi + +if package_exists python-openssl; then + backwards_compatible_list="${backwards_compatible_list} python-openssl" +fi + +if package_exists python-psutil; then + backwards_compatible_list="${backwards_compatible_list} python-psutil" +fi + +if package_exists python-yaml; then + backwards_compatible_list="${backwards_compatible_list} python-yaml" +fi +if package_exists apache2.2-bin; then + backwards_compatible_list="${backwards_compatible_list} apache2.2-bin" +else + backwards_compatible_list="${backwards_compatible_list} apache2-bin" +fi +if package_exists php8.1-cgi; then + backwards_compatible_list="${backwards_compatible_list} php8.1-cgi libapache2-mod-php8.1" +elif package_exists php8.0-cgi; then + backwards_compatible_list="${backwards_compatible_list} php8.0-cgi libapache2-mod-php8.0" +elif package_exists php7.4-cgi; then + backwards_compatible_list="${backwards_compatible_list} php7.4-cgi libapache2-mod-php7.4" +elif package_exists php7.3-cgi; then + backwards_compatible_list="${backwards_compatible_list} php7.3-cgi libapache2-mod-php7.3" +elif package_exists php7.2-cgi; then + 
backwards_compatible_list="${backwards_compatible_list} php7.2-cgi libapache2-mod-php7.2"
+elif package_exists php7.1-cgi; then
+  backwards_compatible_list="${backwards_compatible_list} php7.1-cgi libapache2-mod-php7.1"
+elif package_exists php7.0-cgi; then
+  backwards_compatible_list="${backwards_compatible_list} php7.0-cgi libapache2-mod-php7.0"
+else
+  backwards_compatible_list="${backwards_compatible_list} php5-cgi libapache2-mod-php5"
+fi
 
 # arm cross toolchain packages needed to build chrome on armhf
-EM_REPO="deb http://emdebian.org/tools/debian/ jessie main"
-EM_SOURCE=$(cat </dev/null); then
-      arm_list+=" ${GPP_ARM_PACKAGE}"
-    else
-      if [ "${add_cross_tool_repo}" = "1" ]; then
-        gpg --keyserver pgp.mit.edu --recv-keys ${EM_ARCHIVE_KEY_FINGER}
-        gpg -a --export ${EM_ARCHIVE_KEY_FINGER} | sudo apt-key add -
-        if ! grep "^${EM_REPO}" "${CROSSTOOLS_LIST}" &>/dev/null; then
-          echo "${EM_SOURCE}" | sudo tee -a "${CROSSTOOLS_LIST}" >/dev/null
-        fi
-        arm_list+=" ${GPP_ARM_PACKAGE}"
-      else
-        echo "The Debian Cross-toolchains repository is necessary to"
-        echo "cross-compile Chromium for arm."
-        echo "Rerun with --add-deb-cross-tool-repo to have it added for you."
-      fi
-    fi
-  fi
-  ;;
-  # All necessary ARM packages are available on the default repos on
-  # Debian 9 and later.
-  *)
-    arm_list="libc6-dev-armhf-cross
-              linux-libc-dev-armhf-cross
-              ${GPP_ARM_PACKAGE}"
-    ;;
-esac
+arm_list="libc6-dev-armhf-cross
+          linux-libc-dev-armhf-cross
+          g++-arm-linux-gnueabihf"
 
-# Work around for dependency issue Ubuntu/Trusty: http://crbug.com/435056
+# Work around for dependency issue Ubuntu: http://crbug.com/435056
 case $distro_codename in
-  trusty)
-    arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf
-              gcc-4.8-multilib-arm-linux-gnueabihf"
-    ;;
-  xenial|bionic)
+  bionic)
    arm_list+=" g++-5-multilib-arm-linux-gnueabihf
              gcc-5-multilib-arm-linux-gnueabihf
              gcc-arm-linux-gnueabihf"
    ;;
-  disco|eoan)
-    arm_list+=" g++-9-multilib-arm-linux-gnueabihf
-              gcc-9-multilib-arm-linux-gnueabihf
-              gcc-arm-linux-gnueabihf"
-    ;;
  focal)
    arm_list+=" g++-10-multilib-arm-linux-gnueabihf
              gcc-10-multilib-arm-linux-gnueabihf
              gcc-arm-linux-gnueabihf"
    ;;
-  groovy)
-    arm_list+=" g++-10-multilib-arm-linux-gnueabihf
-              gcc-10-multilib-arm-linux-gnueabihf
-              gcc-arm-linux-gnueabihf
-              g++-10-arm-linux-gnueabihf
-              gcc-10-arm-linux-gnueabihf"
+  jammy)
+    arm_list+=" gcc-arm-linux-gnueabihf
+              g++-11-arm-linux-gnueabihf
+              gcc-11-arm-linux-gnueabihf"
    ;;
 esac
@@ -475,7 +510,9 @@ nacl_list="\
 "
 
 # Some package names have changed over time
-if package_exists libssl1.1; then
+if package_exists libssl-dev; then
+  nacl_list="${nacl_list} libssl-dev:i386"
+elif package_exists libssl1.1; then
   nacl_list="${nacl_list} libssl1.1:i386"
 elif package_exists libssl1.0.2; then
   nacl_list="${nacl_list} libssl1.0.2:i386"
@@ -516,45 +553,9 @@ elif package_exists libbrlapi0.6; then
 else
   dev_list="${dev_list} libbrlapi0.5"
 fi
-if package_exists apache2.2-bin; then
-  dev_list="${dev_list} apache2.2-bin"
-else
-  dev_list="${dev_list} apache2-bin"
-fi
 if package_exists libav-tools; then
   dev_list="${dev_list} libav-tools"
 fi
-if package_exists php7.4-cgi; then
-  dev_list="${dev_list} php7.4-cgi libapache2-mod-php7.4"
-elif package_exists php7.3-cgi; then
-  dev_list="${dev_list} php7.3-cgi libapache2-mod-php7.3"
-elif package_exists php7.2-cgi; then
-  dev_list="${dev_list} php7.2-cgi libapache2-mod-php7.2"
-elif package_exists php7.1-cgi; then
- 
dev_list="${dev_list} php7.1-cgi libapache2-mod-php7.1" -elif package_exists php7.0-cgi; then - dev_list="${dev_list} php7.0-cgi libapache2-mod-php7.0" -else - dev_list="${dev_list} php5-cgi libapache2-mod-php5" -fi - -# Most python 2 packages are removed in Ubuntu 20.10, but the build doesn't seem -# to need them, so only install them if they're available. -if package_exists python-crypto; then - dev_list="${dev_list} python-crypto" -fi -if package_exists python-numpy; then - dev_list="${dev_list} python-numpy" -fi -if package_exists python-openssl; then - dev_list="${dev_list} python-openssl" -fi -if package_exists python-psutil; then - dev_list="${dev_list} python-psutil" -fi -if package_exists python-yaml; then - dev_list="${dev_list} python-yaml" -fi # Some packages are only needed if the distribution actually supports # installing them. @@ -601,7 +602,7 @@ fi # that are part of v8 need to be compiled with -m32 which means # that basic multilib support is needed. if file -L /sbin/init | grep -q 'ELF 64-bit'; then - # gcc-multilib conflicts with the arm cross compiler (at least in trusty) but + # gcc-multilib conflicts with the arm cross compiler but # g++-X.Y-multilib gives us the 32-bit support that we need. Find out the # appropriate value of X and Y by seeing what version the current # distribution's g++-multilib package depends on. @@ -667,6 +668,12 @@ else lib32_list= fi +if [ "$do_inst_android" = "1" ]; then + echo "Including Android dependencies." +else + echo "Skipping Android dependencies." +fi + if [ "$do_inst_arm" = "1" ]; then echo "Including ARM cross toolchain." else @@ -776,7 +783,7 @@ if [ "$do_inst_chromeos_fonts" != "0" ]; then echo "This is expected if your repo is installed on a remote file system." fi echo "It is recommended to install your repo on a local file system." - echo "You can skip the installation of the Chrome OS default founts with" + echo "You can skip the installation of the Chrome OS default fonts with" echo "the command line option: --no-chromeos-fonts." exit 1 fi diff --git a/build/install-chroot.sh b/build/install-chroot.sh index d76d53563bd1..a750e1897ce7 100755 --- a/build/install-chroot.sh +++ b/build/install-chroot.sh @@ -1,6 +1,6 @@ #!/bin/bash -e -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/ios/PRESUBMIT.py b/build/ios/PRESUBMIT.py new file mode 100644 index 000000000000..0c7a35580f1f --- /dev/null +++ b/build/ios/PRESUBMIT.py @@ -0,0 +1,20 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +PRESUBMIT_VERSION = '2.0.0' + +USE_PYTHON3 = True + +TEST_PATTERNS = [r'.+_test.py$'] + + +def CheckUnitTests(input_api, output_api): + # Runs all unit tests under the build/ios folder. + return input_api.canned_checks.RunUnitTestsInDirectory( + input_api, + output_api, + '.', + files_to_check=TEST_PATTERNS, + run_on_python2=False, + skip_shebang_check=True) diff --git a/build/ios/extension_bundle_data.gni b/build/ios/extension_bundle_data.gni new file mode 100644 index 000000000000..78006f160728 --- /dev/null +++ b/build/ios/extension_bundle_data.gni @@ -0,0 +1,23 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Template to package an app extension into an app. 
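+#
+# A typical invocation looks like this (target names are illustrative):
+#
+#   extension_bundle_data("foo_extension_bundle") {
+#     extension_target = ":foo_extension"
+#   }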
+# +# Parameters +# +# extension_target: +# name of the extension target to package; the extension +# bundle name must be derived from the target name +# +template("extension_bundle_data") { + assert(defined(invoker.extension_target), + "extension_target must be defined for $target_name") + + bundle_data(target_name) { + public_deps = [ invoker.extension_target ] + outputs = [ "{{bundle_contents_dir}}/PlugIns/{{source_file_part}}" ] + sources = [ get_label_info(invoker.extension_target, "root_out_dir") + "/" + + get_label_info(invoker.extension_target, "name") + ".appex" ] + } +} diff --git a/build/ios/intent_definition.gni b/build/ios/intent_definition.gni index 259f287fc686..29f859cbd363 100644 --- a/build/ios/intent_definition.gni +++ b/build/ios/intent_definition.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/ios/presubmit_support.py b/build/ios/presubmit_support.py new file mode 100644 index 000000000000..773d1586e769 --- /dev/null +++ b/build/ios/presubmit_support.py @@ -0,0 +1,39 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Presubmit helpers for ios + +See https://www.chromium.org/developers/how-tos/depottools/presubmit-scripts +for more details about the presubmit API built into depot_tools. +""" + +from . import update_bundle_filelist + + +def CheckBundleData(input_api, output_api, base, globroot='//'): + root = input_api.change.RepositoryRoot() + filelist = input_api.os_path.join(input_api.PresubmitLocalPath(), + base + '.filelist') + globlist = input_api.os_path.join(input_api.PresubmitLocalPath(), + base + '.globlist') + if globroot.startswith('//'): + globroot = input_api.os_path.join(input_api.change.RepositoryRoot(), + globroot[2:]) + else: + globroot = input_api.os_path.join(input_api.PresubmitLocalPath(), globroot) + if update_bundle_filelist.process_filelist(filelist, + globlist, + globroot, + check=True, + verbose=input_api.verbose) == 0: + return [] + else: + script = input_api.os_path.join(input_api.change.RepositoryRoot(), 'build', + 'ios', 'update_bundle_filelist.py') + + return [ + output_api.PresubmitError( + 'Filelist needs to be re-generated. Please run \'python3 %s %s %s ' + '%s\' and include the changes in this CL' % + (script, filelist, globlist, globroot)) + ] diff --git a/build/ios/presubmit_support_test.py b/build/ios/presubmit_support_test.py new file mode 100755 index 000000000000..6bbc6024efee --- /dev/null +++ b/build/ios/presubmit_support_test.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import datetime +import os.path +import sys +import tempfile +import unittest + +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) + +from PRESUBMIT_test_mocks import MockInputApi, MockOutputApi +from build.ios import presubmit_support + +_TEMP_FILELIST_CONTENTS = """# Copyright %d The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. 
+""" % (datetime.datetime.now().year) + +_TEMP_GLOBLIST_CONTENTS = """** +-*.globlist +-*.filelist +""" + + +class BundleDataPresubmit(unittest.TestCase): + def setUp(self): + self.mock_input_api = MockInputApi() + self.mock_input_api.change.RepositoryRoot = lambda: os.path.join( + os.path.dirname(__file__), '..', '..') + self.mock_input_api.PresubmitLocalPath = lambda: os.path.dirname(__file__) + self.mock_output_api = MockOutputApi() + + def testBasic(self): + """ + Checks that a glob can be expanded to build a file list and if it + matches the existing file list, we should see no error. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/basic', '.') + self.assertEqual([], results) + + def testExclusion(self): + """ + Check that globs can be used to exclude files from file lists. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/exclusions', '.') + self.assertEqual([], results) + + def testDifferentLocalPath(self): + """ + Checks the case where the presubmit directory is not the same as the + globroot, but it is still local (i.e., not relative to the repository + root) + """ + results = presubmit_support.CheckBundleData( + self.mock_input_api, self.mock_output_api, + 'test_data/different_local_path', 'test_data') + self.assertEqual([], results) + + def testRepositoryRelative(self): + """ + Checks the case where globs are relative to the repository root. + """ + results = presubmit_support.CheckBundleData( + self.mock_input_api, self.mock_output_api, + 'test_data/repository_relative') + self.assertEqual([], results) + + def testMissingFilesInFilelist(self): + """ + Checks that we do indeed return an error if the filelist is missing a + file. In this case, all of the test .filelist and .globlist files are + excluded. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/missing', '.') + self.assertEqual(1, len(results)) + + def testExtraFilesInFilelist(self): + """ + Checks the case where extra files have been included in the file list. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/extra', '.') + self.assertEqual(1, len(results)) + + def testOrderInsensitive(self): + """ + Checks that we do not trigger an error for cases where the file list is + correct, but in a different order than the globlist expansion. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/reorder', '.') + self.assertEqual([], results) + + def testUnexpectedHeader(self): + """ + Checks an unexpected header in a file list causes an error. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/comment', '.') + self.assertEqual(1, len(results)) + + def testUntrackedFiles(self): + """ + Checks that the untracked files are correctly ignored. 
+ """ + with tempfile.TemporaryDirectory() as temp_dir: + with open(os.path.join(temp_dir, 'untracked.filelist'), 'w') as f: + f.write(_TEMP_FILELIST_CONTENTS) + with open(os.path.join(temp_dir, 'untracked.globlist'), 'w') as f: + f.write(_TEMP_GLOBLIST_CONTENTS) + with open(os.path.join(temp_dir, 'untracked.txt'), 'w') as f: + f.write('Hello, World!') + path = os.path.join(temp_dir, 'untracked') + self.mock_input_api.change.RepositoryRoot = lambda: temp_dir + self.mock_input_api.PresubmitLocalPath = lambda: temp_dir + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'untracked') + self.assertEqual([], results) + + def testExcludeDuplicates(self): + """ + Checks that duplicate filenames are not added to a filelist. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/duplicates', '.') + self.assertEqual([], results) + + def testCheckOutsideGloblistDir(self): + """ + Checks that including files outside the globlist directory is an error. + """ + results = presubmit_support.CheckBundleData( + self.mock_input_api, self.mock_output_api, + 'test_data/outside_globlist_dir', '.') + self.assertEqual(1, len(results)) + + def testCheckIgnoreOutsideGloblistDir(self): + """ + Checks that files outside the globlist directory can be ignored. + """ + results = presubmit_support.CheckBundleData( + self.mock_input_api, self.mock_output_api, + 'test_data/ignore_outside_globlist_dir', '.') + self.assertEqual([], results) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/ios/test_data/bar.html b/build/ios/test_data/bar.html new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/build/ios/test_data/basic.filelist b/build/ios/test_data/basic.filelist new file mode 100644 index 000000000000..496dcbda1078 --- /dev/null +++ b/build/ios/test_data/basic.filelist @@ -0,0 +1,7 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/subdirectory/baz.txt diff --git a/build/ios/test_data/basic.globlist b/build/ios/test_data/basic.globlist new file mode 100644 index 000000000000..b4d7d66aa12b --- /dev/null +++ b/build/ios/test_data/basic.globlist @@ -0,0 +1,5 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/subdirectory/* diff --git a/build/ios/test_data/comment.filelist b/build/ios/test_data/comment.filelist new file mode 100644 index 000000000000..0f6c30fcd822 --- /dev/null +++ b/build/ios/test_data/comment.filelist @@ -0,0 +1,2 @@ +# This comment is an unexpected header. +test_data/subdirectory/baz.txt diff --git a/build/ios/test_data/comment.globlist b/build/ios/test_data/comment.globlist new file mode 100644 index 000000000000..93c82c61225a --- /dev/null +++ b/build/ios/test_data/comment.globlist @@ -0,0 +1,7 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Some comment followed by an empty line. 
+ +test_data/subdirectory/* diff --git a/build/ios/test_data/different_local_path.filelist b/build/ios/test_data/different_local_path.filelist new file mode 100644 index 000000000000..a45d180b2696 --- /dev/null +++ b/build/ios/test_data/different_local_path.filelist @@ -0,0 +1,9 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +bar.html +foo.css +subdirectory/baz.txt diff --git a/build/ios/test_data/different_local_path.globlist b/build/ios/test_data/different_local_path.globlist new file mode 100644 index 000000000000..a17a781dec39 --- /dev/null +++ b/build/ios/test_data/different_local_path.globlist @@ -0,0 +1,6 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +** +-**list diff --git a/build/ios/test_data/duplicates.filelist b/build/ios/test_data/duplicates.filelist new file mode 100644 index 000000000000..496dcbda1078 --- /dev/null +++ b/build/ios/test_data/duplicates.filelist @@ -0,0 +1,7 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/subdirectory/baz.txt diff --git a/build/ios/test_data/duplicates.globlist b/build/ios/test_data/duplicates.globlist new file mode 100644 index 000000000000..79bf591dad9c --- /dev/null +++ b/build/ios/test_data/duplicates.globlist @@ -0,0 +1,7 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/subdirectory/* +# This duplicate glob should have no effect on the resulting filelist. +test_data/subdirectory/* diff --git a/build/ios/test_data/exclusions.filelist b/build/ios/test_data/exclusions.filelist new file mode 100644 index 000000000000..d9e69f187770 --- /dev/null +++ b/build/ios/test_data/exclusions.filelist @@ -0,0 +1,9 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/bar.html +test_data/foo.css +test_data/subdirectory/baz.txt diff --git a/build/ios/test_data/exclusions.globlist b/build/ios/test_data/exclusions.globlist new file mode 100644 index 000000000000..92c241a70013 --- /dev/null +++ b/build/ios/test_data/exclusions.globlist @@ -0,0 +1,6 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +test_data/** +-test_data/**list diff --git a/build/ios/test_data/extra.filelist b/build/ios/test_data/extra.filelist new file mode 100644 index 000000000000..3597a457dda2 --- /dev/null +++ b/build/ios/test_data/extra.filelist @@ -0,0 +1,8 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/bar.html +test_data/foo.css diff --git a/build/ios/test_data/extra.globlist b/build/ios/test_data/extra.globlist new file mode 100644 index 000000000000..9fe758f1f178 --- /dev/null +++ b/build/ios/test_data/extra.globlist @@ -0,0 +1,5 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/*.css diff --git a/build/ios/test_data/foo.css b/build/ios/test_data/foo.css new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/build/ios/test_data/ignore_outside_globlist_dir.filelist b/build/ios/test_data/ignore_outside_globlist_dir.filelist new file mode 100644 index 000000000000..a306b7ea044e --- /dev/null +++ b/build/ios/test_data/ignore_outside_globlist_dir.filelist @@ -0,0 +1,8 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +presubmit_support_test.py +test_data/subdirectory/baz.txt diff --git a/build/ios/test_data/ignore_outside_globlist_dir.globlist b/build/ios/test_data/ignore_outside_globlist_dir.globlist new file mode 100644 index 000000000000..471a0c46f066 --- /dev/null +++ b/build/ios/test_data/ignore_outside_globlist_dir.globlist @@ -0,0 +1,8 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/subdirectory/* +# push(ignore-relative) +presubmit_support_test.py +# pop(ignore-relative) diff --git a/build/ios/test_data/missing.filelist b/build/ios/test_data/missing.filelist new file mode 100644 index 000000000000..d9e69f187770 --- /dev/null +++ b/build/ios/test_data/missing.filelist @@ -0,0 +1,9 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/bar.html +test_data/foo.css +test_data/subdirectory/baz.txt diff --git a/build/ios/test_data/missing.globlist b/build/ios/test_data/missing.globlist new file mode 100644 index 000000000000..267b25246fcc --- /dev/null +++ b/build/ios/test_data/missing.globlist @@ -0,0 +1,8 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This should cover every file in test_data/ and its subdirectories (including +# test files). 
+ +test_data/** diff --git a/build/ios/test_data/outside_globlist_dir.filelist b/build/ios/test_data/outside_globlist_dir.filelist new file mode 100644 index 000000000000..a81d5ad7386b --- /dev/null +++ b/build/ios/test_data/outside_globlist_dir.filelist @@ -0,0 +1,8 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/subdirectory/baz.txt +presubmit_support_test.py diff --git a/build/ios/test_data/outside_globlist_dir.globlist b/build/ios/test_data/outside_globlist_dir.globlist new file mode 100644 index 000000000000..31bb073bc3fe --- /dev/null +++ b/build/ios/test_data/outside_globlist_dir.globlist @@ -0,0 +1,6 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/subdirectory/* +presubmit_support_test.py diff --git a/build/ios/test_data/reorder.filelist b/build/ios/test_data/reorder.filelist new file mode 100644 index 000000000000..58921bc4facd --- /dev/null +++ b/build/ios/test_data/reorder.filelist @@ -0,0 +1,9 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/subdirectory/baz.txt +test_data/foo.css +test_data/bar.html diff --git a/build/ios/test_data/reorder.globlist b/build/ios/test_data/reorder.globlist new file mode 100644 index 000000000000..92c241a70013 --- /dev/null +++ b/build/ios/test_data/reorder.globlist @@ -0,0 +1,6 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/** +-test_data/**list diff --git a/build/ios/test_data/repository_relative.filelist b/build/ios/test_data/repository_relative.filelist new file mode 100644 index 000000000000..796087b1da87 --- /dev/null +++ b/build/ios/test_data/repository_relative.filelist @@ -0,0 +1,9 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +//build/ios/test_data/bar.html +//build/ios/test_data/foo.css +//build/ios/test_data/subdirectory/baz.txt diff --git a/build/ios/test_data/repository_relative.globlist b/build/ios/test_data/repository_relative.globlist new file mode 100644 index 000000000000..b7c42100ac2c --- /dev/null +++ b/build/ios/test_data/repository_relative.globlist @@ -0,0 +1,6 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +//build/ios/test_data/** +-//build/ios/test_data/**list diff --git a/build/ios/test_data/subdirectory/baz.txt b/build/ios/test_data/subdirectory/baz.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/build/ios/update_bundle_filelist.py b/build/ios/update_bundle_filelist.py new file mode 100755 index 000000000000..2e21205c308d --- /dev/null +++ b/build/ios/update_bundle_filelist.py @@ -0,0 +1,318 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +""" +Updates .filelist files using data from corresponding .globlist files (or +checks whether they are up to date). + +bundle_data targets require an explicit source list, but maintaining these large +lists can be cumbersome. This script aims to simplify the process of updating +these lists by either expanding globs to update file lists or check that an +existing file list matches such an expansion (i.e., checking during presubmit). + +The .globlist file contains a list of globs that will be expanded to either +compare or replace a corresponding .filelist. It is possible to exclude items +from the file list with globs as well. These lines are prefixed with '-' and are +processed in order, so be sure that exclusions succeed inclusions in the list of +globs. Comments and empty lines are permitted in .globfiles; comments are +prefixed with '#'. + +By convention, the base name of the .globlist and .filelist files matches the +label of their corresponding bundle_data from the .gn file. In order to ensure +that these filelists don't get stale, there should also be a PRESUBMIT.py +which uses this script to check that list is up to date. + +By default, the script will update the file list to match the expanded globs. +""" + +import argparse +import datetime +import difflib +import glob +import os.path +import re +import subprocess +import sys + +# Character to set colors in terminal. Taken, along with the printing routine +# below, from update_deps.py. +TERMINAL_ERROR_COLOR = '\033[91m' +TERMINAL_RESET_COLOR = '\033[0m' + +_HEADER = """# Copyright %d The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +""" % (datetime.datetime.now().year) + +_HEADER_PATTERN = re.compile(r"""# Copyright [0-9]+ The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. 
+""") + +_HEADER_HEIGHT = 6 + +_START_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR = '# push(ignore-relative)' +_STOP_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR = '# pop(ignore-relative)' + + +def parse_filelist(filelist_name): + try: + with open(filelist_name) as filelist: + unfiltered = [l for l in filelist] + header = ''.join(unfiltered[:_HEADER_HEIGHT]) + files = sorted(l.strip() for l in unfiltered[_HEADER_HEIGHT:]) + return (files, header) + except Exception as e: + print_error(f'Could not read file list: {filelist_name}', f'{type(e)}: {e}') + return [] + + +def get_git_command_name(): + if sys.platform.startswith('win'): + return 'git.bat' + return 'git' + + +def get_tracked_files(directory, globroot, repository_root_relative, verbose): + try: + git_cmd = get_git_command_name() + with subprocess.Popen([git_cmd, 'ls-files', '--error-unmatch', directory], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=globroot) as p: + output = p.communicate() + if p.returncode != 0: + if verbose: + print_error( + f'Could not gather a list of tracked files in {directory}', + f'{output[1]}') + return set() + + files = [f.decode('utf-8') for f in output[0].splitlines()] + + # Need paths to be relative to directory in order to match expansions. + # This should happen naturally due to cwd above, but we need to take + # special care if relative to the repository root. + if repository_root_relative: + files = ['//' + f for f in files] + + # Handle Windows backslashes + files = [f.replace('\\', '/') for f in files] + + return set(files) + + except Exception as e: + if verbose: + print_error(f'Could not gather a list of tracked files in {directory}', + f'{type(e)}: {e}') + return set() + + +def combine_potentially_repository_root_relative_paths(a, b): + if b.startswith('//'): + # If b is relative to the repository root, os.path will consider it absolute + # and os.path.join will fail. In this case, we can simply concatenate the + # paths. + return (a + b, True) + else: + return (os.path.join(a, b), False) + + +def parse_and_expand_globlist(globlist_name, glob_root): + # The following expects glob_root not to end in a trailing slash. + if glob_root.endswith('/'): + glob_root = glob_root[:-1] + + check_expansions_outside_globlist_dir = True + globlist_dir = os.path.dirname(globlist_name) + + with open(globlist_name) as globlist: + # Paths in |files| and |to_check| must use unix separators. Using a set + # ensures no unwanted duplicates. The files in |to_check| must be in the + # globroot or a subdirectory. + files = set() + to_check = set() + for g in globlist: + g = g.strip() + + # Ignore blank lines + if not g: + continue + + # Toggle error checking. + if g == _START_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR: + check_expansions_outside_globlist_dir = False + elif g == _STOP_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR: + check_expansions_outside_globlist_dir = True + + # Ignore comments. + if not g or g.startswith('#'): + continue + + # Exclusions are prefixed with '-'. + is_exclusion = g.startswith('-') + if is_exclusion: + g = g[1:] + + (combined, + root_relative) = combine_potentially_repository_root_relative_paths( + glob_root, g) + + prefix_size = len(glob_root) + if not root_relative: + # We need to account for the separator. + prefix_size += 1 + + expansion = glob.glob(combined, recursive=True) + + # Filter out directories. 
+ expansion = [f for f in expansion if os.path.isfile(f)] + + if check_expansions_outside_globlist_dir: + for f in expansion: + relative = os.path.relpath(f, globlist_dir) + if relative.startswith('..'): + raise Exception(f'Globlist expansion outside globlist dir: {f}') + + # Make relative to |glob_root|. + expansion = [f[prefix_size:] for f in expansion] + + # Handle Windows backslashes + expansion = [f.replace('\\', '/') for f in expansion] + + # Since paths in |expansion| only use unix separators, it is safe to + # compare for both the purpose of exclusion and addition. + if is_exclusion: + files = files.difference(expansion) + else: + files = files.union(expansion) + + # Return a sorted list. + return sorted(files) + + +def compare_lists(a, b): + differ = difflib.Differ() + full_diff = differ.compare(a, b) + lines = [d for d in full_diff if not d.startswith(' ')] + additions = [l[2:] for l in lines if l.startswith('+ ')] + removals = [l[2:] for l in lines if l.startswith('- ')] + return (additions, removals) + + +def write_filelist(filelist_name, files, header): + try: + with open(filelist_name, 'w', encoding='utf-8', newline='') as filelist: + if not _HEADER_PATTERN.search(header): + header = _HEADER + filelist.write(header) + for file in files: + filelist.write(f'{file}\n') + except Exception as e: + print_error(f'Could not write file list: {filelist_name}', + f'{type(e)}: {e}') + return [] + + +def process_filelist(filelist, globlist, globroot, check=False, verbose=False): + files_from_globlist = [] + try: + files_from_globlist = parse_and_expand_globlist(globlist, globroot) + except Exception as e: + if verbose: + print_error(f'Could not read glob list: {globlist}', f'{type(e)}: {e}') + return 1 + + (files, header) = parse_filelist(filelist) + + (additions, removals) = compare_lists(files, files_from_globlist) + to_ignore = set() + + # Ignore additions of untracked files. + if additions: + directories = set([os.path.dirname(f) for f in additions]) + tracked_files = set() + for d in directories: + (combined, + root_relative) = combine_potentially_repository_root_relative_paths( + globroot, d) + relative = os.path.relpath(combined, globroot) + tracked_files = tracked_files.union( + get_tracked_files(relative, globroot, root_relative, verbose)) + to_ignore = set(additions).difference(tracked_files) + additions = [f for f in additions if f in tracked_files] + + files_from_globlist = [f for f in files_from_globlist if f not in to_ignore] + + if check: + if not _HEADER_PATTERN.search(header): + if verbose: + print_error(f'Unexpected header for {filelist}', f'{header}') + return 1 + if not additions and not removals: + return 0 + if verbose: + pretty_additions = ['+ ' + f for f in additions] + pretty_removals = ['- ' + f for f in removals] + pretty_diff = '\n'.join(pretty_additions + pretty_removals) + print_error('File list does not match glob expansion', f'{pretty_diff}') + return 1 + else: + write_filelist(filelist, files_from_globlist, header) + return 0 + + +def main(args): + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('filelist', help='Contains one file per line') + parser.add_argument('globlist', + help='Contains globs that, when expanded, ' + 'should match the filelist. 
Use '
+                      '--help for details on syntax')
+  parser.add_argument('globroot',
+                      help='Directory from which globs are relative')
+  parser.add_argument('-c',
+                      '--check',
+                      action='store_true',
+                      help='Prevents modifying the file list')
+  parser.add_argument('-v',
+                      '--verbose',
+                      action='store_true',
+                      help='Use this to print details on differences')
+  args = parser.parse_args(args)
+  return process_filelist(args.filelist,
+                          args.globlist,
+                          args.globroot,
+                          check=args.check,
+                          verbose=args.verbose)
+
+
+def print_error(error_message, error_info):
+  """ Print the `error_message` with additional `error_info` """
+  color_start, color_end = adapted_color_for_output(TERMINAL_ERROR_COLOR,
+                                                    TERMINAL_RESET_COLOR)
+
+  error_message = color_start + 'ERROR: ' + error_message + color_end
+  if len(error_info) > 0:
+    error_message = error_message + '\n' + error_info
+  print(error_message, file=sys.stderr)
+
+
+def adapted_color_for_output(color_start, color_end):
+  """ Returns the `color_start`, `color_end` tuple if the output is a
+  terminal, or empty strings otherwise """
+  if not sys.stdout.isatty():
+    return '', ''
+  return color_start, color_end
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/lacros/BUILD.gn b/build/lacros/BUILD.gn
index 216010a42e48..0a93891469a0 100644
--- a/build/lacros/BUILD.gn
+++ b/build/lacros/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,13 +8,21 @@ import("//build/util/process_version.gni")
 
 python_library("lacros_resource_sizes_py") {
   pydeps_file = "lacros_resource_sizes.pydeps"
   data = [ "//buildtools/third_party/eu-strip/bin/eu-strip" ]
-  data_deps = [ "//third_party/catapult/tracing:convert_chart_json" ]
+  data_deps = [
+    "//build/util:test_results",
+    "//third_party/catapult/tracing:convert_chart_json",
+  ]
 }
 
-process_version("lacros_version_metadata") {
-  sources = [ "//chrome/VERSION" ]
-
-  template_file = "metadata.json.in"
-  output = "$root_out_dir/metadata.json"
-  process_only = true
+# Lacros is built with the "{arch}-generic" configuration, but in Chrome OS
+# that is just one board variation, so the libraries on the *-generic boards
+# may not be compatible with the ones on the actual DUTs. A common pattern we
+# have hit recently is symbols exposed by libgcc: they started being exposed
+# with the libunwind transition and, depending on the per-board
+# implementation, other libraries may or may not re-expose them. To mitigate
+# this, pass -shared-libgcc so that the system libgcc is always looked up.
+config("optional_shared_libgcc") {
+  ldflags = [ "-shared-libgcc" ]
 }
diff --git a/build/lacros/OWNERS b/build/lacros/OWNERS
index aae4f73726f1..e9865487453b 100644
--- a/build/lacros/OWNERS
+++ b/build/lacros/OWNERS
@@ -1,3 +1,2 @@
 svenzheng@chromium.org
-liaoyuke@chromium.org
 erikchen@chromium.org
diff --git a/build/lacros/PRESUBMIT.py b/build/lacros/PRESUBMIT.py
index 1394a42c2cd0..642ee7e8fbd4 100644
--- a/build/lacros/PRESUBMIT.py
+++ b/build/lacros/PRESUBMIT.py
@@ -1,12 +1,22 @@
-# Copyright (c) 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
"""Presubmit script for changes affecting //build/lacros""" +USE_PYTHON3 = True + def _CommonChecks(input_api, output_api): + # Don't run lacros tests on Windows. + if input_api.is_windows: + return [] tests = input_api.canned_checks.GetUnitTestsInDirectory( - input_api, output_api, '.', [r'^.+_test\.py$']) + input_api, + output_api, + '.', [r'^.+_test\.py$'], + run_on_python2=False, + run_on_python3=True, + skip_shebang_check=True) return input_api.RunTests(tests) diff --git a/build/lacros/README.md b/build/lacros/README.md new file mode 100644 index 000000000000..be0a243f0d94 --- /dev/null +++ b/build/lacros/README.md @@ -0,0 +1,11 @@ +This folder contains code for running lacros in tests. + +This includes: +* test_runner.py +Run linux-lacros related tests. + +* mojo_connection_lacros_launcher +Script for launching lacros for debugging. + +* lacros_resource_sizes.py +Monitoring lacros binary size script used by builders. diff --git a/build/lacros/lacros_resource_sizes.gni b/build/lacros/lacros_resource_sizes.gni index 0c50e0fe3403..7d1e91965a9b 100644 --- a/build/lacros/lacros_resource_sizes.gni +++ b/build/lacros/lacros_resource_sizes.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/lacros/lacros_resource_sizes.py b/build/lacros/lacros_resource_sizes.py index bf791c6531b7..6004ae7f8832 100755 --- a/build/lacros/lacros_resource_sizes.py +++ b/build/lacros/lacros_resource_sizes.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2020 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Reports binary size metrics for LaCrOS build artifacts. @@ -16,6 +16,10 @@ import subprocess import sys import tempfile +SRC_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) +sys.path.insert(0, os.path.join(SRC_DIR, 'build', 'util')) +from lib.results import result_sink +from lib.results import result_types @contextlib.contextmanager @@ -55,7 +59,6 @@ def _SysPath(path): _BASE_CHART = { 'format_version': '0.1', 'benchmark_name': 'resource_sizes', - 'benchmark_description': 'LaCrOS resource size information.', 'trace_rerun_options': [], 'charts': {} } @@ -88,28 +91,39 @@ def __init__(self, paths, title, track_stripped=False, self.track_stripped = track_stripped self.track_compressed = track_compressed - -# List of disjoint build artifact groups for size tracking. This list should be -# synched with lacros-amd64-generic-binary-size-rel builder contents (specified -# in # //infra/config/subprojects/chromium/ci.star) and -# chromeos-amd64-generic-lacros-internal builder (specified in src-internal). + def __eq__(self, other): + """Overrides the default implementation""" + if isinstance(other, _Group): + return (self.paths == other.paths) & (self.title == other.title) & ( + self.track_stripped == other.track_stripped) & ( + self.track_compressed == other.track_compressed) + return False + +# Common artifacts in official builder lacros-arm32 and lacros64 in +# src-internal. 
The artifacts can be found in
+# chromium/src-internal/testing/buildbot/archive/lacros64.json,
+# chromium/src-internal/testing/buildbot/archive/lacros-arm32.json and
+# chromium/src-internal/testing/buildbot/archive/lacros-arm64.json.
 _TRACKED_GROUPS = [
     _Group(paths=['chrome'],
            title='File: chrome',
            track_stripped=True,
            track_compressed=True),
-    _Group(paths=['crashpad_handler'], title='File: crashpad_handler'),
+    _Group(paths=['chrome_crashpad_handler'],
+           title='File: chrome_crashpad_handler'),
     _Group(paths=['icudtl.dat'], title='File: icudtl.dat'),
+    _Group(paths=['icudtl.dat.hash'], title='File: icudtl.dat.hash'),
+    _Group(paths=['libEGL.so'], title='File: libEGL.so'),
+    _Group(paths=['libGLESv2.so'], title='File: libGLESv2.so'),
     _Group(paths=['nacl_helper'], title='File: nacl_helper'),
-    _Group(paths=['nacl_irt_x86_64.nexe'], title='File: nacl_irt_x86_64.nexe'),
     _Group(paths=['resources.pak'], title='File: resources.pak'),
     _Group(paths=[
-        'chrome_100_percent.pak', 'chrome_200_percent.pak', 'headless_lib.pak'
+        'chrome_100_percent.pak', 'chrome_200_percent.pak',
+        'headless_lib_data.pak', 'headless_lib_strings.pak'
     ],
            title='Group: Other PAKs'),
     _Group(paths=['snapshot_blob.bin'], title='Group: Misc'),
     _Group(paths=['locales/'], title='Dir: locales'),
-    _Group(paths=['swiftshader/'], title='Dir: swiftshader'),
     _Group(paths=['WidevineCdm/'], title='Dir: WidevineCdm'),
 ]
 
@@ -239,13 +253,17 @@ def _dump_chart_json(output_dir, chartjson):
   histogram_path = os.path.join(output_dir, 'perf_results.json')
   logging.critical('Dumping histograms to %s', histogram_path)
-  with open(histogram_path, 'w') as json_file:
+  with open(histogram_path, 'wb') as json_file:
     json_file.write(histogram_result.stdout)
 
 
 def _run_resource_sizes(args):
   """Main flow to extract and output size data."""
   chartjson = _BASE_CHART.copy()
+  chartjson.update({
+      'benchmark_description':
+      ('LaCrOS %s resource size information.' % args.arch)
+  })
   report_func = perf_tests_results_helper.ReportPerfResult
   total_sizes = collections.Counter()
@@ -277,7 +295,25 @@ def report_sizes(sizes, title, track_stripped, track_compressed):
                 value=sizes[_KEY_STRIPPED_GZIPPED],
                 units='bytes')
 
-  for g in _TRACKED_GROUPS:
+  tracked_groups = _TRACKED_GROUPS.copy()
+  # Architecture amd64 requires the artifact nacl_irt_x86_64.nexe.
+  if args.arch == 'amd64':
+    tracked_groups.append(
+        _Group(paths=['nacl_irt_x86_64.nexe'],
+               title='File: nacl_irt_x86_64.nexe'))
+  # Architecture arm32 requires the artifact nacl_irt_arm.nexe.
+  elif args.arch == 'arm32':
+    tracked_groups.append(
+        _Group(paths=['nacl_irt_arm.nexe'], title='File: nacl_irt_arm.nexe'))
+    tracked_groups.append(
+        _Group(paths=['nacl_helper_bootstrap'],
+               title='File: nacl_helper_bootstrap'))
+  # TODO(https://crbug.com/1356761): remove the following part once nacl files
+  # are available.
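+  # (Note: tracked_groups.remove() below relies on the _Group.__eq__
+  # override defined above; without it, remove() would compare object
+  # identities and never find the equivalent group instance.)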
+ elif args.arch == 'arm64': + tracked_groups.remove( + _Group(paths=['nacl_helper'], title='File: nacl_helper')) + for g in tracked_groups: sizes = sum( map(_get_catagorized_filesizes, _visit_paths(args.out_dir, g.paths)), collections.Counter()) @@ -303,6 +339,11 @@ def main(): required=True, type=os.path.realpath, help='Location of the build artifacts.') + argparser.add_argument('--arch', + required=True, + type=str, + help='The architecture of lacros, valid values: amd64,' + ' arm32, arm64') output_group = argparser.add_mutually_exclusive_group() @@ -343,6 +384,14 @@ def main(): json.dump(isolated_script_output, output_file) with open(args.isolated_script_test_output, 'w') as output_file: json.dump(isolated_script_output, output_file) + result_sink_client = result_sink.TryInitClient() + if result_sink_client: + status = result_types.PASS + if not isolated_script_output['valid']: + status = result_types.UNKNOWN + elif isolated_script_output['failures']: + status = result_types.FAIL + result_sink_client.Post(test_name, status, None, None, None) if __name__ == '__main__': diff --git a/build/lacros/lacros_resource_sizes.pydeps b/build/lacros/lacros_resource_sizes.pydeps index 9c0afd35706f..dc6a11699674 100644 --- a/build/lacros/lacros_resource_sizes.pydeps +++ b/build/lacros/lacros_resource_sizes.pydeps @@ -7,6 +7,10 @@ ../../third_party/catapult/tracing/tracing/value/__init__.py ../../third_party/catapult/tracing/tracing/value/convert_chart_json.py ../../third_party/catapult/tracing/tracing_project.py +../util/lib/__init__.py ../util/lib/common/perf_result_data_type.py ../util/lib/common/perf_tests_results_helper.py +../util/lib/results/__init__.py +../util/lib/results/result_sink.py +../util/lib/results/result_types.py lacros_resource_sizes.py diff --git a/build/lacros/mojo_connection_lacros_launcher.py b/build/lacros/mojo_connection_lacros_launcher.py index 786176e38485..a0f2cfcd2ad8 100755 --- a/build/lacros/mojo_connection_lacros_launcher.py +++ b/build/lacros/mojo_connection_lacros_launcher.py @@ -1,6 +1,6 @@ #!/usr/bin/env vpython3 # -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Helps launch lacros-chrome with mojo connection established on Linux @@ -29,8 +29,12 @@ import argparse import array import contextlib +import getpass +import grp import os import pathlib +import pwd +import resource import socket import sys import subprocess @@ -55,12 +59,12 @@ def __exit__(self, exc_type, exc_value, trace): def _ReceiveFDs(sock): """Receives FDs from ash-chrome that will be used to launch lacros-chrome. - Args: - sock: A connected unix domain socket. + Args: + sock: A connected unix domain socket. - Returns: - File objects for the mojo connection and maybe startup data file. - """ + Returns: + File objects for the mojo connection and maybe startup data file. 
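+
+  The first byte of the received message is a version code; version 1 is
+  followed by exactly two FDs, the startup-data FD and the crosapi mojo FD.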
+ """ # This function is borrowed from with modifications: # https://docs.python.org/3/library/socket.html#socket.socket.recvmsg fds = array.array("i") # Array of ints @@ -88,25 +92,58 @@ def _ReceiveFDs(sock): 'CMSG_LEN is unexpected: %d' % (len(cmsg_data), )) fds.frombytes(cmsg_data[:]) - assert version == b'\x00', 'Expecting version code to be 0' - assert len(fds) in (1, 2, 3), 'Expecting exactly 1, 2, or 3 FDs' - legacy_mojo_fd = os.fdopen(fds[0]) - startup_fd = None if len(fds) < 2 else os.fdopen(fds[1]) - mojo_fd = None if len(fds) < 3 else os.fdopen(fds[2]) - return legacy_mojo_fd, startup_fd, mojo_fd + if version == b'\x01': + assert len(fds) == 2, 'Expecting exactly 2 FDs' + startup_fd = os.fdopen(fds[0]) + mojo_fd = os.fdopen(fds[1]) + elif version: + raise AssertionError('Unknown version: \\x%s' % version.hex()) + else: + raise AssertionError('Failed to receive startup message from ash-chrome. ' + 'Make sure you\'re logged in to Chrome OS.') + return startup_fd, mojo_fd def _MaybeClosing(fileobj): """Returns closing context manager, if given fileobj is not None. - If the given fileobj is none, return nullcontext. - """ + If the given fileobj is none, return nullcontext. + """ return (contextlib.closing if fileobj else NullContext)(fileobj) +def _ApplyCgroups(): + """Applies cgroups used in ChromeOS to lacros chrome as well.""" + # Cgroup directories taken from ChromeOS session_manager job configuration. + UI_FREEZER_CGROUP_DIR = '/sys/fs/cgroup/freezer/ui' + UI_CPU_CGROUP_DIR = '/sys/fs/cgroup/cpu/ui' + pid = os.getpid() + with open(os.path.join(UI_CPU_CGROUP_DIR, 'tasks'), 'a') as f: + f.write(str(pid) + '\n') + with open(os.path.join(UI_FREEZER_CGROUP_DIR, 'cgroup.procs'), 'a') as f: + f.write(str(pid) + '\n') + + +def _PreExec(uid, gid, groups): + """Set environment up for running the chrome binary.""" + # Nice and realtime priority values taken ChromeOSs session_manager job + # configuration. + resource.setrlimit(resource.RLIMIT_NICE, (40, 40)) + resource.setrlimit(resource.RLIMIT_RTPRIO, (10, 10)) + os.setgroups(groups) + os.setgid(gid) + os.setuid(uid) + + def Main(): arg_parser = argparse.ArgumentParser() arg_parser.usage = __doc__ + arg_parser.add_argument( + '-r', + '--root-env-setup', + action='store_true', + help='Set typical cgroups and environment for chrome. ' + 'If this is set, this script must be run as root.') arg_parser.add_argument( '-s', '--socket-path', @@ -119,20 +156,25 @@ def Main(): assert 'XDG_RUNTIME_DIR' in os.environ assert os.environ.get('EGL_PLATFORM') == 'surfaceless' + if flags.root_env_setup: + # Check if we are actually root and error otherwise. + assert getpass.getuser() == 'root', \ + 'Root required environment flag specified, but user is not root.' + # Apply necessary cgroups to our own process, so they will be inherited by + # lacros chrome. + _ApplyCgroups() + else: + print('WARNING: Running chrome without appropriate environment. ' + 'This may affect performance test results. 
' + 'Set -r and run as root to avoid this.') + with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock: sock.connect(flags.socket_path.as_posix()) - legacy_mojo_connection, startup_connection, mojo_connection = ( - _ReceiveFDs(sock)) + startup_connection, mojo_connection = (_ReceiveFDs(sock)) - with _MaybeClosing(legacy_mojo_connection), \ - _MaybeClosing(startup_connection), \ - _MaybeClosing(mojo_connection): + with _MaybeClosing(startup_connection), _MaybeClosing(mojo_connection): cmd = args[:] pass_fds = [] - if legacy_mojo_connection: - cmd.append('--mojo-platform-channel-handle=%d' % - legacy_mojo_connection.fileno()) - pass_fds.append(legacy_mojo_connection.fileno()) if startup_connection: cmd.append('--cros-startup-data-fd=%d' % startup_connection.fileno()) pass_fds.append(startup_connection.fileno()) @@ -140,7 +182,26 @@ def Main(): cmd.append('--crosapi-mojo-platform-channel-handle=%d' % mojo_connection.fileno()) pass_fds.append(mojo_connection.fileno()) - proc = subprocess.Popen(cmd, pass_fds=pass_fds) + + env = os.environ.copy() + if flags.root_env_setup: + username = 'chronos' + p = pwd.getpwnam(username) + uid = p.pw_uid + gid = p.pw_gid + groups = [g.gr_gid for g in grp.getgrall() if username in g.gr_mem] + env['HOME'] = p.pw_dir + env['LOGNAME'] = username + env['USER'] = username + + def fn(): + return _PreExec(uid, gid, groups) + else: + + def fn(): + return None + + proc = subprocess.Popen(cmd, pass_fds=pass_fds, preexec_fn=fn) return proc.wait() diff --git a/build/lacros/test_runner.py b/build/lacros/test_runner.py index 982c04501ba7..856acd012e69 100755 --- a/build/lacros/test_runner.py +++ b/build/lacros/test_runner.py @@ -1,6 +1,6 @@ -#!/usr/bin/env vpython +#!/usr/bin/env python3 # -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """This script facilitates running tests for lacros on Linux. @@ -9,7 +9,7 @@ to setup build directory with the lacros-chrome-on-linux build configuration, and corresponding test targets are built successfully. - * Example usages: +Example usages ./build/lacros/test_runner.py test out/lacros/url_unittests ./build/lacros/test_runner.py test out/lacros/browser_tests @@ -39,9 +39,22 @@ The above command starts ash-chrome with xvfb instead of an X11 window, and it's useful when running tests without a display attached, such as sshing. + + For version skew testing when passing --ash-chrome-path-override, the runner + will try to find the ash major version and Lacros major version. If ash is + newer(major version larger), the runner will not run any tests and just + returns success. + +Interactively debugging tests + + Any of the previous examples accept the switches + --gdb + --lldb + to run the tests in the corresponding debugger. """ import argparse +import json import os import logging import re @@ -57,21 +70,22 @@ os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)) sys.path.append(os.path.join(_SRC_ROOT, 'third_party', 'depot_tools')) -# Base GS URL to store prebuilt ash-chrome. -_GS_URL_BASE = 'gs://ash-chromium-on-linux-prebuilts/x86_64' -# Latest file version. -_GS_URL_LATEST_FILE = _GS_URL_BASE + '/latest/ash-chromium.txt' +# The cipd path for prebuilt ash chrome. +_ASH_CIPD_PATH = 'chromium/testing/linux-ash-chromium/x86_64/ash.zip' -# GS path to the zipped ash-chrome build with any given version. 
-_GS_ASH_CHROME_PATH = 'ash-chromium.zip'
 
 # Directory to cache downloaded ash-chrome versions to avoid re-downloading.
 _PREBUILT_ASH_CHROME_DIR = os.path.join(os.path.dirname(__file__),
                                         'prebuilt_ash_chrome')
 
+# File path to the asan symbolizer executable.
+_ASAN_SYMBOLIZER_PATH = os.path.join(_SRC_ROOT, 'tools', 'valgrind', 'asan',
+                                     'asan_symbolize.py')
+
 # Number of seconds to wait for ash-chrome to start.
-ASH_CHROME_TIMEOUT_SECONDS = 10
+ASH_CHROME_TIMEOUT_SECONDS = (
+    300 if os.environ.get('ASH_WRAPPER', None) else 10)
 
 # List of targets that require ash-chrome as a Wayland server in order to run.
 _TARGETS_REQUIRE_ASH_CHROME = [
@@ -102,9 +116,23 @@
     # are allowed. For now we only enable crosapi in targets that run tests
     # serially.
     'interactive_ui_tests',
-    'lacros_chrome_browsertests'
+    'lacros_chrome_browsertests',
+    'lacros_chrome_browsertests_run_in_series'
 ]
 
+# Default test filter file for each target. These filter files will be
+# used by default if no other filter file is specified.
+_DEFAULT_FILTER_FILES_MAPPING = {
+    'browser_tests': 'linux-lacros.browser_tests.filter',
+    'components_unittests': 'linux-lacros.components_unittests.filter',
+    'content_browsertests': 'linux-lacros.content_browsertests.filter',
+    'interactive_ui_tests': 'linux-lacros.interactive_ui_tests.filter',
+    'lacros_chrome_browsertests':
+    'linux-lacros.lacros_chrome_browsertests.filter',
+    'sync_integration_tests': 'linux-lacros.sync_integration_tests.filter',
+    'unit_tests': 'linux-lacros.unit_tests.filter',
+}
+
 
 def _GetAshChromeDirPath(version):
   """Returns a path to the dir storing the downloaded version of ash-chrome."""
@@ -135,7 +163,7 @@ def _remove_unused_ash_chrome_versions(version_to_skip):
       # them to keep the directory clean.
       os.remove(p)
       continue
-    chrome_path = os.path.join(p, 'chrome')
+    chrome_path = os.path.join(p, 'test_ash_chrome')
     if not os.path.exists(chrome_path):
       chrome_path = p
     age = time.time() - os.path.getatime(chrome_path)
@@ -145,28 +173,71 @@
                    'past %d days', p, days)
       shutil.rmtree(p)
 
-def _GsutilCopyWithRetry(gs_path, local_name, retry_times=3):
-  """Gsutil copy with retry.
+
+def _GetLatestVersionOfAshChrome():
+  '''Get the latest ash chrome version.
+
+  Get the package version info with the canary ref.
+
+  Returns:
+    A string with the chrome version.
+
+  Raises:
+    RuntimeError: if we cannot get the version.
+  '''
+  cp = subprocess.run(
+      ['cipd', 'describe', _ASH_CIPD_PATH, '-version', 'canary'],
+      capture_output=True)
+  assert (cp.returncode == 0)
+  groups = re.search(r'version:(?P<version>[\d\.]+)', str(cp.stdout))
+  if not groups:
+    raise RuntimeError('Cannot find the version. Error message: %s' %
+                       cp.stdout)
+  return groups.group('version')
+
+
+def _DownloadAshChromeFromCipd(path, version):
+  '''Download the ash chrome with the requested version.
 
   Args:
-    gs_path: The gs path for remote location.
-    local_name: The local file name.
-    retry_times: The total try times if the gsutil call fails.
+    path: string for the downloaded ash chrome folder.
+    version: string for the ash chrome version.
+
+  Returns:
+    None; the downloaded ash chrome is placed under |path|.
+ ''' + with tempfile.TemporaryDirectory() as temp_dir: + ensure_file_path = os.path.join(temp_dir, 'ensure_file.txt') + f = open(ensure_file_path, 'w+') + f.write(_ASH_CIPD_PATH + ' version:' + version) + f.close() + subprocess.run( + ['cipd', 'ensure', '-ensure-file', ensure_file_path, '-root', path]) + + +def _DoubleCheckDownloadedAshChrome(path, version): + '''Check the downloaded ash is the expected version. + + Double check by running the chrome binary with --version. + + Args: + path: string for the downloaded ash chrome folder. + version: string for the expected ash chrome version. Raises: - RuntimeError: If failed to download the specified version, for example, - if the version is not present on gcs. - """ - import download_from_google_storage - gsutil = download_from_google_storage.Gsutil( - download_from_google_storage.GSUTIL_DEFAULT_PATH) - exit_code = 1 - retry = 0 - while exit_code and retry < retry_times: - retry += 1 - exit_code = gsutil.call('cp', gs_path, local_name) - if exit_code: - raise RuntimeError('Failed to download: "%s"' % gs_path) + RuntimeError if no test_ash_chrome binary can be found. + ''' + test_ash_chrome = os.path.join(path, 'test_ash_chrome') + if not os.path.exists(test_ash_chrome): + raise RuntimeError('Can not find test_ash_chrome binary under %s' % path) + cp = subprocess.run([test_ash_chrome, '--version'], capture_output=True) + assert (cp.returncode == 0) + if str(cp.stdout).find(version) == -1: + logging.warning( + 'The downloaded ash chrome version is %s, but the ' + 'expected ash chrome is %s. There is a version mismatch. Please ' + 'file a bug to OS>Lacros so someone can take a look.' % + (cp.stdout, version)) def _DownloadAshChromeIfNecessary(version): @@ -186,7 +257,7 @@ def IsAshChromeDirValid(ash_chrome_dir): # runner process gets killed in the middle of unzipping (~2 seconds), but # it's unlikely for the assumption to break in practice. return os.path.isdir(ash_chrome_dir) and os.path.isfile( - os.path.join(ash_chrome_dir, 'chrome')) + os.path.join(ash_chrome_dir, 'test_ash_chrome')) ash_chrome_dir = _GetAshChromeDirPath(version) if IsAshChromeDirValid(ash_chrome_dir): @@ -194,40 +265,13 @@ def IsAshChromeDirValid(ash_chrome_dir): shutil.rmtree(ash_chrome_dir, ignore_errors=True) os.makedirs(ash_chrome_dir) - with tempfile.NamedTemporaryFile() as tmp: - logging.info('Ash-chrome version: %s', version) - gs_path = _GS_URL_BASE + '/' + version + '/' + _GS_ASH_CHROME_PATH - _GsutilCopyWithRetry(gs_path, tmp.name) - - # https://bugs.python.org/issue15795. ZipFile doesn't preserve permissions. - # And in order to workaround the issue, this function is created and used - # instead of ZipFile.extractall(). - # The solution is copied from: - # https://stackoverflow.com/questions/42326428/zipfile-in-python-file-permission - def ExtractFile(zf, info, extract_dir): - zf.extract(info.filename, path=extract_dir) - perm = info.external_attr >> 16 - os.chmod(os.path.join(extract_dir, info.filename), perm) - - with zipfile.ZipFile(tmp.name, 'r') as zf: - # Extra all files instead of just 'chrome' binary because 'chrome' needs - # other resources and libraries to run. 
- for info in zf.infolist(): - ExtractFile(zf, info, ash_chrome_dir) - + _DownloadAshChromeFromCipd(ash_chrome_dir, version) + _DoubleCheckDownloadedAshChrome(ash_chrome_dir, version) _remove_unused_ash_chrome_versions(version) -def _GetLatestVersionOfAshChrome(): - """Returns the latest version of uploaded ash-chrome.""" - with tempfile.NamedTemporaryFile() as tmp: - _GsutilCopyWithRetry(_GS_URL_LATEST_FILE, tmp.name) - with open(tmp.name, 'r') as f: - return f.read().strip() - - def _WaitForAshChromeToStart(tmp_xdg_dir, lacros_mojo_socket_file, - enable_mojo_crosapi): + enable_mojo_crosapi, ash_ready_file): """Waits for Ash-Chrome to be up and running and returns a boolean indicator. Determine whether ash-chrome is up and running by checking whether two files @@ -241,27 +285,210 @@ def _WaitForAshChromeToStart(tmp_xdg_dir, lacros_mojo_socket_file, lacros_mojo_socket_file (str): Path to the lacros mojo socket file. enable_mojo_crosapi (bool): Whether to bootstrap the crosapi mojo interface between ash and the lacros test binary. + ash_ready_file (str): Path to a non-existing file. After ash is ready for + testing, the file will be created. Returns: A boolean indicating whether Ash-chrome is up and running. """ def IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file, - enable_mojo_crosapi): - return (len(os.listdir(tmp_xdg_dir)) >= 2 - and (not enable_mojo_crosapi - or os.path.exists(lacros_mojo_socket_file))) + enable_mojo_crosapi, ash_ready_file): + # There should be 2 wayland files. + if len(os.listdir(tmp_xdg_dir)) < 2: + return False + if enable_mojo_crosapi and not os.path.exists(lacros_mojo_socket_file): + return False + return os.path.exists(ash_ready_file) time_counter = 0 while not IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file, - enable_mojo_crosapi): + enable_mojo_crosapi, ash_ready_file): time.sleep(0.5) time_counter += 0.5 if time_counter > ASH_CHROME_TIMEOUT_SECONDS: break return IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file, - enable_mojo_crosapi) + enable_mojo_crosapi, ash_ready_file) + + +def _ExtractAshMajorVersion(file_path): + """Extract major version from file_path. + + File path like this: + ../../lacros_version_skew_tests_v94.0.4588.0/test_ash_chrome + + Returns: + int representing the major version. Or 0 if it can't extract + major version. + """ + m = re.search( + 'lacros_version_skew_tests_v(?P<version>[0-9]+).[0-9]+.[0-9]+.[0-9]+/', + file_path) + if (m and 'version' in m.groupdict().keys()): + return int(m.group('version')) + logging.warning('Can not find the ash version in %s.' % file_path) + # Returns ash major version as 0, so we can still run tests. + # This likely happens because the user is running in a local environment. + return 0 + + +def _FindLacrosMajorVersionFromMetadata(): + # This handles the logic on bots. When running on bots, + # we don't copy source files to test machines. So we build a + # metadata.json file which contains version information. + if not os.path.exists('metadata.json'): + logging.error('Can not determine current version.') + # Returns 0 so it can't run any tests. + return 0 + version = '' + with open('metadata.json', 'r') as file: + content = json.load(file) + version = content['content']['version'] + return int(version[:version.find('.')]) + + +def _FindLacrosMajorVersion(): + """Returns the major version in the current checkout. + + It first tries to read src/chrome/VERSION. If that's not available, + it falls back to metadata.json. + + Returns: + int representing the major version.
Or 0 if it fails to + determine the version. """ + version_file = os.path.abspath( + os.path.join(os.path.abspath(os.path.dirname(__file__)), + '../../chrome/VERSION')) + # This mostly happens for local development, where + # src/chrome/VERSION exists. + if os.path.exists(version_file): + lines = open(version_file, 'r').readlines() + return int(lines[0][lines[0].find('=') + 1:-1]) + return _FindLacrosMajorVersionFromMetadata() + + +def _ParseSummaryOutput(forward_args): + """Find the summary output file path. + + Args: + forward_args (list): Args to be forwarded to the test command. + + Returns: + None if not found, or str representing the output file path. + """ + logging.warning(forward_args) + for arg in forward_args: + if arg.startswith('--test-launcher-summary-output='): + return arg[len('--test-launcher-summary-output='):] + return None + + +def _IsRunningOnBots(forward_args): + """Detects if the script is running on bots or not. + + Args: + forward_args (list): Args to be forwarded to the test command. + + Returns: + True if the script is running on bots. Otherwise returns False. + """ + return '--test-launcher-bot-mode' in forward_args + + +def _KillNicely(proc, timeout_secs=2, first_wait_secs=0): + """Kills a subprocess nicely. + + Args: + proc: The subprocess to kill. + timeout_secs: The timeout to wait in seconds. + first_wait_secs: The grace period before sending the first SIGTERM, in seconds. + """ + if not proc: + return + + if first_wait_secs: + try: + proc.wait(first_wait_secs) + return + except subprocess.TimeoutExpired: + pass + + if proc.poll() is None: + proc.terminate() + try: + proc.wait(timeout_secs) + except subprocess.TimeoutExpired: + proc.kill() + proc.wait() + + +def _ClearDir(dirpath): + """Deletes everything within the directory. + + Args: + dirpath: The path of the directory. + """ + for e in os.scandir(dirpath): + if e.is_dir(): + shutil.rmtree(e.path) + elif e.is_file(): + os.remove(e.path) + + +def _LaunchDebugger(args, forward_args, test_env): + """Launches the requested debugger. + + This is used to wrap the test invocation in a debugger. It returns the + created Popen object of the debugger process. + + Args: + args (dict): Args for this script. + forward_args (list): Args to be forwarded to the test command. + test_env (dict): Computed environment variables for the test. + """ + logging.info('Starting debugger.') + + # Force the tests into single-process-test mode for debugging unless manually + # specified. Otherwise the tests will run in a child process that the debugger + # won't be attached to and the debugger won't do anything. + if not ("--single-process" in forward_args + or "--single-process-tests" in forward_args): + forward_args += ["--single-process-tests"] + + # Adding --single-process-tests can cause some tests to fail when they're + # run in the same process. Forcing the user to specify a filter will prevent + # a later error. + if not [i for i in forward_args if i.startswith("--gtest_filter")]: + logging.error("""Interactive debugging requested without --gtest_filter This script adds --single-process-tests to support interactive debugging but some tests will fail in this mode unless run independently. To debug a test, specify --gtest_filter=Foo.Bar to name the test you want to debug. """) + sys.exit(1) + + # This code attempts to source the debugger configuration file. Some + # users will have this in their init but sourcing it more than once is + # harmless and helps people that haven't configured it.
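+ # For illustration only (the binary name and filter below are placeholders): + # with --gdb, the command assembled next comes out roughly as + # gdb --init-eval-command 'source .../tools/gdb/gdbinit' --args ./browser_tests --gtest_filter=Foo.Bar --single-process-tests + # while --lldb instead injects the lldbinit module through two -O script flags.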
+ if args.gdb: + gdbinit_file = os.path.normpath( + os.path.join(os.path.realpath(__file__), "../../../tools/gdb/gdbinit")) + debugger_command = [ + 'gdb', '--init-eval-command', 'source ' + gdbinit_file, '--args' + ] + else: + lldbinit_dir = os.path.normpath( + os.path.join(os.path.realpath(__file__), "../../../tools/lldb")) + debugger_command = [ + 'lldb', '-O', + "script sys.path[:0] = ['%s']" % lldbinit_dir, '-O', + 'script import lldbinit', '--' + ] + debugger_command += [args.command] + forward_args + return subprocess.Popen(debugger_command, env=test_env) def _RunTestWithAshChrome(args, forward_args): @@ -269,10 +496,39 @@ Args: args (dict): Args for this script. - forward_args (dict): Args to be forwarded to the test command. + forward_args (list): Args to be forwarded to the test command. """ if args.ash_chrome_path_override: ash_chrome_file = args.ash_chrome_path_override + ash_major_version = _ExtractAshMajorVersion(ash_chrome_file) + lacros_major_version = _FindLacrosMajorVersion() + if ash_major_version > lacros_major_version: + logging.warning('''Not running any tests, because we do not \ support version skew testing for Lacros M%s against ash M%s''' % + (lacros_major_version, ash_major_version)) + # Create an empty output.json file so the result adapter can read + # the file. Otherwise the result adapter will report no file found + # and cause an infra failure. + output_json = _ParseSummaryOutput(forward_args) + if output_json: + with open(output_json, 'w') as f: + f.write("""{"all_tests":[],"disabled_tests":[],"global_tags":[], "per_iteration_data":[],"test_locations":{}}""") + # Although we don't run any tests, this is considered a success. + return 0 + if not os.path.exists(ash_chrome_file): + logging.error("""Can not find ash chrome at %s. Did you download \ the ash from CIPD? If you don't plan to build your own ash, you need \ to download it first. Example command lines: + $ cipd auth-login + $ echo "chromium/testing/linux-ash-chromium/x86_64/ash.zip \ version:92.0.4515.130" > /tmp/ensure-file.txt + $ cipd ensure -ensure-file /tmp/ensure-file.txt \ -root lacros_version_skew_tests_v92.0.4515.130 + Then you can use --ash-chrome-path-override=\ lacros_version_skew_tests_v92.0.4515.130/test_ash_chrome """ % ash_chrome_file) + return 1 elif args.ash_chrome_path: ash_chrome_file = args.ash_chrome_path else: @@ -282,7 +538,7 @@ def _RunTestWithAshChrome(args, forward_args): logging.info('Ash-chrome version: %s', ash_chrome_version) ash_chrome_file = os.path.join(_GetAshChromeDirPath(ash_chrome_version), - 'chrome') + 'test_ash_chrome') try: # Starts Ash-Chrome.
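# Two scratch directories back the ash instance launched below: tmp_xdg_dir_name is the XDG runtime dir where ash's wayland-* sockets appear, and the ash user-data dir additionally hosts the lacros.sock mojo socket plus the ash_ready.txt marker polled by _WaitForAshChromeToStart().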
tmp_xdg_dir_name = tempfile.mkdtemp() @@ -293,8 +549,10 @@ lacros_mojo_socket_file = '%s/lacros.sock' % tmp_ash_data_dir_name lacros_mojo_socket_arg = ('--lacros-mojo-socket-for-testing=%s' % lacros_mojo_socket_file) + ash_ready_file = '%s/ash_ready.txt' % tmp_ash_data_dir_name enable_mojo_crosapi = any(t == os.path.basename(args.command) for t in _TARGETS_REQUIRE_MOJO_CROSAPI) + ash_wayland_socket_name = 'wayland-exo' ash_process = None ash_env = os.environ.copy() @@ -304,64 +562,148 @@ '--user-data-dir=%s' % tmp_ash_data_dir_name, '--enable-wayland-server', '--no-startup-window', - '--use-fake-ml-service-for-test', + '--disable-input-event-activation-protection', + '--disable-lacros-keep-alive', + '--disable-login-lacros-opening', + '--enable-field-trial-config', + '--enable-features=LacrosSupport,LacrosPrimary,LacrosOnly', + '--ash-ready-file-path=%s' % ash_ready_file, + '--wayland-server-socket=%s' % ash_wayland_socket_name, ] + if '--enable-pixel-output-in-tests' not in forward_args: + ash_cmd.append('--disable-gl-drawing-for-tests') + if enable_mojo_crosapi: ash_cmd.append(lacros_mojo_socket_arg) + # Users can specify a wrapper for the ash binary to do things like + # attaching debuggers. For example, this will open a new terminal window + # and run GDB. + # $ export ASH_WRAPPER="gnome-terminal -- gdb --ex=r --args" + ash_wrapper = os.environ.get('ASH_WRAPPER', None) + if ash_wrapper: + logging.info('Running ash with "ASH_WRAPPER": %s', ash_wrapper) + ash_cmd = list(ash_wrapper.split()) + ash_cmd + ash_process_has_started = False total_tries = 3 num_tries = 0 + ash_start_time = None + + # Create a log file if the user wants one. + ash_log = None + ash_log_path = None + + run_tests_in_debugger = args.gdb or args.lldb + + if args.ash_logging_path: + ash_log_path = args.ash_logging_path + # Put ash logs in a separate file on bots. + # For asan builds, the ash log is not symbolized. In order to + # read the stack trace, we don't redirect logs to another file. + elif _IsRunningOnBots(forward_args) and not args.combine_ash_logs_on_bots: + summary_file = _ParseSummaryOutput(forward_args) + if summary_file: + ash_log_path = os.path.join(os.path.dirname(summary_file), + 'ash_chrome.log') + elif run_tests_in_debugger: + # The debugger is unusable when all Ash logs are getting dumped to the + # same terminal. Redirect to a log file if there isn't one specified. + logging.info("Running in the debugger and --ash-logging-path is not " + + "specified, defaulting to the current directory.") + ash_log_path = 'ash_chrome.log' + + if ash_log_path: + ash_log = open(ash_log_path, 'a') + logging.info('Writing ash-chrome logs to: %s', ash_log_path) + + ash_stdout = ash_log or None + test_stdout = None + + # Set up the asan symbolizer.
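+ # When symbolization is enabled below, ash's stdout is switched to a pipe + # and streamed through asan_symbolize.py, which writes the symbolized output + # to wherever the raw log would otherwise have gone (ash_symbolize_stdout); + # the test process gets the same treatment further down.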
+ ash_symbolize_process = None + test_symbolize_process = None + should_symbolize = False + if args.asan_symbolize_output and os.path.exists(_ASAN_SYMBOLIZER_PATH): + should_symbolize = True + ash_symbolize_stdout = ash_stdout + ash_stdout = subprocess.PIPE + test_stdout = subprocess.PIPE + while not ash_process_has_started and num_tries < total_tries: num_tries += 1 - ash_process = subprocess.Popen(ash_cmd, env=ash_env) + ash_start_time = time.monotonic() + logging.info('Starting ash-chrome.') + ash_process = subprocess.Popen(ash_cmd, + env=ash_env, + stdout=ash_stdout, + stderr=subprocess.STDOUT) + + if should_symbolize: + logging.info('Symbolizing ash logs with asan symbolizer.') + ash_symbolize_process = subprocess.Popen([_ASAN_SYMBOLIZER_PATH], + stdin=ash_process.stdout, + stdout=ash_symbolize_stdout, + stderr=subprocess.STDOUT) + # Allow ash_process to receive a SIGPIPE if symbolize process exits. + ash_process.stdout.close() + ash_process_has_started = _WaitForAshChromeToStart( - tmp_xdg_dir_name, lacros_mojo_socket_file, enable_mojo_crosapi) + tmp_xdg_dir_name, lacros_mojo_socket_file, enable_mojo_crosapi, + ash_ready_file) if ash_process_has_started: break logging.warning('Starting ash-chrome timed out after %ds', ASH_CHROME_TIMEOUT_SECONDS) + logging.warning('Are you using test_ash_chrome?') logging.warning('Printing the output of "ps aux" for debugging:') subprocess.call(['ps', 'aux']) - if ash_process and ash_process.poll() is None: - ash_process.kill() + _KillNicely(ash_process) + _KillNicely(ash_symbolize_process, first_wait_secs=1) + + # Clean up for retry. + _ClearDir(tmp_xdg_dir_name) + _ClearDir(tmp_ash_data_dir_name) if not ash_process_has_started: raise RuntimeError('Timed out waiting for ash-chrome to start') + ash_elapsed_time = time.monotonic() - ash_start_time + logging.info('Started ash-chrome in %.3fs on try %d.', ash_elapsed_time, + num_tries) + # Starts tests. if enable_mojo_crosapi: forward_args.append(lacros_mojo_socket_arg) - reason_of_jobs_1 = ( - 'multiple clients crosapi is not supported yet (crbug.com/1124490), ' - 'lacros_chrome_browsertests has to run tests serially') - - if any('--test-launcher-jobs' in arg for arg in forward_args): - raise RuntimeError( - 'Specifying "--test-launcher-jobs" is not allowed because %s. ' - 'Please remove it and this script will automatically append ' - '"--test-launcher-jobs=1"' % reason_of_jobs_1) - - # TODO(crbug.com/1124490): Run lacros_chrome_browsertests in parallel once - # the bug is fixed. - logging.warning('Appending "--test-launcher-jobs=1" because %s', - reason_of_jobs_1) - forward_args.append('--test-launcher-jobs=1') - + forward_args.append('--ash-chrome-path=' + ash_chrome_file) test_env = os.environ.copy() + test_env['WAYLAND_DISPLAY'] = ash_wayland_socket_name test_env['EGL_PLATFORM'] = 'surfaceless' test_env['XDG_RUNTIME_DIR'] = tmp_xdg_dir_name - test_process = subprocess.Popen([args.command] + forward_args, env=test_env) + + if run_tests_in_debugger: + test_process = _LaunchDebugger(args, forward_args, test_env) + else: + logging.info('Starting test process.') + test_process = subprocess.Popen([args.command] + forward_args, + env=test_env, + stdout=test_stdout, + stderr=subprocess.STDOUT) + if should_symbolize: + logging.info('Symbolizing test logs with asan symbolizer.') + test_symbolize_process = subprocess.Popen([_ASAN_SYMBOLIZER_PATH], + stdin=test_process.stdout) + # Allow test_process to receive a SIGPIPE if symbolize process exits. 
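+ # (Closing the parent's copy of the pipe's read end leaves the symbolizer + # as the only reader, so a dead symbolizer surfaces as SIGPIPE in the test + # instead of blocked writes.)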
+ test_process.stdout.close() return test_process.wait() finally: - if ash_process and ash_process.poll() is None: - ash_process.terminate() - # Allow process to do cleanup and exit gracefully before killing. - time.sleep(0.5) - ash_process.kill() + _KillNicely(ash_process) + # Give symbolizer processes time to finish writing with first_wait_secs. + _KillNicely(ash_symbolize_process, first_wait_secs=1) + _KillNicely(test_symbolize_process, first_wait_secs=1) shutil.rmtree(tmp_xdg_dir_name, ignore_errors=True) shutil.rmtree(tmp_ash_data_dir_name, ignore_errors=True) @@ -371,17 +713,14 @@ def _RunTestDirectly(args, forward_args): """Runs tests by invoking the test command directly. args (dict): Args for this script. - forward_args (dict): Args to be forwarded to the test command. + forward_args (list): Args to be forwarded to the test command. """ try: p = None p = subprocess.Popen([args.command] + forward_args) return p.wait() finally: - if p and p.poll() is None: - p.terminate() - time.sleep(0.5) - p.kill() + _KillNicely(p) def _HandleSignal(sig, _): @@ -400,11 +739,21 @@ def _HandleSignal(sig, _): sys.exit(128 + sig) +def _ExpandFilterFileIfNeeded(test_target, forward_args): + if (test_target in _DEFAULT_FILTER_FILES_MAPPING.keys() and not any( + [arg.startswith('--test-launcher-filter-file') for arg in forward_args])): + file_path = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..', 'testing', + 'buildbot', 'filters', + _DEFAULT_FILTER_FILES_MAPPING[test_target])) + forward_args.append(f'--test-launcher-filter-file={file_path}') + + def _RunTest(args, forward_args): """Runs tests with given args. args (dict): Args for this script. - forward_args (dict): Args to be forwarded to the test command. + forward_args (list): Args to be forwarded to the test command. Raises: RuntimeError: If the given test binary doesn't exist or the test runner @@ -415,13 +764,15 @@ raise RuntimeError('Specified test command: "%s" doesn\'t exist' % args.command) + test_target = os.path.basename(args.command) + _ExpandFilterFileIfNeeded(test_target, forward_args) + # |_TARGETS_REQUIRE_ASH_CHROME| may not always be accurate as it is updated # with a best effort only, therefore, allow the invoker to override the # behavior with a specified ash-chrome version, which makes sure that # automated CI/CQ builders would always work correctly. requires_ash_chrome = any( - re.match(t, os.path.basename(args.command)) - for t in _TARGETS_REQUIRE_ASH_CHROME) + re.match(t, test_target) for t in _TARGETS_REQUIRE_ASH_CHROME) if not requires_ash_chrome and not args.ash_chrome_version: return _RunTestDirectly(args, forward_args) @@ -458,7 +809,39 @@ def Main(): version_group.add_argument( '--ash-chrome-path', type=str, - help='Path to an locally built ash-chrome to use for testing.') + help='Path to a locally built ash-chrome to use for testing. ' + 'In general you should build //chrome/test:test_ash_chrome.') + + debugger_group = test_parser.add_mutually_exclusive_group() + debugger_group.add_argument('--gdb', + action='store_true', + help='Run the test in GDB.') + debugger_group.add_argument('--lldb', + action='store_true', + help='Run the test in LLDB.') + + # This is for version skew testing. The current CI/CQ builder builds + # an ash chrome and passes it using --ash-chrome-path. In order to use the same + # builder for version skew testing, we use a new argument to override + # the ash chrome.
+ test_parser.add_argument( + '--ash-chrome-path-override', + type=str, + help='The same as --ash-chrome-path. But this will override ' + '--ash-chrome-path or --ash-chrome-version if any of these ' + 'arguments exist.') + test_parser.add_argument( + '--ash-logging-path', + type=str, + help='File & path to ash-chrome logging output while running Lacros ' + 'browser tests. If not provided, no output will be generated.') + test_parser.add_argument('--combine-ash-logs-on-bots', + action='store_true', + help='Whether to combine ash logs on bots.') + test_parser.add_argument( + '--asan-symbolize-output', + action='store_true', + help='Whether to run subprocess log outputs through the asan symbolizer.') # This is for version skew testing. The current CI/CQ builder builds # an ash chrome and pass it using --ash-chrome-path. In order to use the same @@ -471,6 +854,11 @@ def Main(): '--ash-chrome-path or --ash-chrome-version if any of these ' 'arguments exist.') args = arg_parser.parse_known_args() + if not hasattr(args[0], "func"): + # No command specified. + print(__doc__) + sys.exit(1) + return args[0].func(args[0], args[1]) diff --git a/build/lacros/test_runner_test.py b/build/lacros/test_runner_test.py index 4e06e0abac61..77f7325f2e3d 100755 --- a/build/lacros/test_runner_test.py +++ b/build/lacros/test_runner_test.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2020 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -25,6 +25,26 @@ def setUp(self): def tearDown(self): logging.disable(logging.NOTSET) + @mock.patch.object(os.path, + 'dirname', + return_value='chromium/src/build/lacros') + def test_expand_filter_file(self, _): + args = ['--some_flag="flag"'] + test_runner._ExpandFilterFileIfNeeded('browser_tests', args) + self.assertTrue(args[1].endswith( + 'chromium/src/' + 'testing/buildbot/filters/linux-lacros.browser_tests.filter')) + self.assertTrue(args[1].startswith('--test-launcher-filter-file=')) + + args = ['--some_flag="flag"'] + test_runner._ExpandFilterFileIfNeeded('random_tests', args) + self.assertEqual(len(args), 1) + + args = ['--test-launcher-filter-file=new/filter'] + test_runner._ExpandFilterFileIfNeeded('browser_tests', args) + self.assertEqual(len(args), 1) + self.assertTrue(args[0].endswith('new/filter')) + @parameterized.expand([ 'url_unittests', './url_unittests', @@ -46,20 +66,20 @@ def test_do_not_require_ash_chrome(self, command, mock_popen, mock_download, self.assertFalse(mock_download.called) @parameterized.expand([ - 'browser_tests', - 'components_browsertests', - 'content_browsertests', + 'browser_tests', 'components_browsertests', 'content_browsertests', 'lacros_chrome_browsertests', + 'browser_tests --enable-pixel-output-in-tests' ]) @mock.patch.object(os, 'listdir', - return_value=['wayland-0', 'wayland-0.lock']) + return_value=['wayland-exo', 'wayland-exo.lock']) @mock.patch.object(tempfile, 'mkdtemp', side_effect=['/tmp/xdg', '/tmp/ash-data']) @mock.patch.object(os.environ, 'copy', side_effect=[{}, {}]) @mock.patch.object(os.path, 'exists', return_value=True) @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(os.path, 'abspath', return_value='/a/b/filter') @mock.patch.object(test_runner, '_GetLatestVersionOfAshChrome', return_value='793554') @@ -68,7 +88,9 @@ def test_do_not_require_ash_chrome(self, command, mock_popen, mock_download, # Tests that the test runner 
downloads and spawns ash-chrome if ash-chrome is # required. def test_require_ash_chrome(self, command, mock_popen, mock_download, *_): - args = ['script_name', 'test', command] + command_parts = command.split() + args = ['script_name', 'test'] + args.extend(command_parts) with mock.patch.object(sys, 'argv', args): test_runner.Main() mock_download.assert_called_with('793554') @@ -76,13 +98,21 @@ def test_require_ash_chrome(self, command, mock_popen, mock_download, *_): ash_chrome_args = mock_popen.call_args_list[0][0][0] self.assertTrue(ash_chrome_args[0].endswith( - 'build/lacros/prebuilt_ash_chrome/793554/chrome')) + 'build/lacros/prebuilt_ash_chrome/793554/test_ash_chrome')) expected_ash_chrome_args = [ '--user-data-dir=/tmp/ash-data', '--enable-wayland-server', '--no-startup-window', - '--use-fake-ml-service-for-test', + '--disable-input-event-activation-protection', + '--disable-lacros-keep-alive', + '--disable-login-lacros-opening', + '--enable-field-trial-config', + '--enable-features=LacrosSupport,LacrosPrimary,LacrosOnly', + '--ash-ready-file-path=/tmp/ash-data/ash_ready.txt', + '--wayland-server-socket=wayland-exo', ] + if '--enable-pixel-output-in-tests' not in command_parts: + expected_ash_chrome_args.append('--disable-gl-drawing-for-tests') if command == 'lacros_chrome_browsertests': expected_ash_chrome_args.append( '--lacros-mojo-socket-for-testing=/tmp/ash-data/lacros.sock') @@ -94,23 +124,24 @@ def test_require_ash_chrome(self, command, mock_popen, mock_download, *_): if command == 'lacros_chrome_browsertests': self.assertListEqual([ command, + '--test-launcher-filter-file=/a/b/filter', '--lacros-mojo-socket-for-testing=/tmp/ash-data/lacros.sock', - '--test-launcher-jobs=1' + '--ash-chrome-path=' + ash_chrome_args[0], ], test_args) else: - self.assertListEqual([command], test_args) + self.assertListEqual(test_args[:len(command_parts)], command_parts) test_env = mock_popen.call_args_list[1][1].get('env', {}) self.assertDictEqual( { + 'WAYLAND_DISPLAY': 'wayland-exo', 'XDG_RUNTIME_DIR': '/tmp/xdg', 'EGL_PLATFORM': 'surfaceless' }, test_env) - @mock.patch.object(os, 'listdir', - return_value=['wayland-0', 'wayland-0.lock']) + return_value=['wayland-exo', 'wayland-exo.lock']) @mock.patch.object(os.path, 'exists', return_value=True) @mock.patch.object(os.path, 'isfile', return_value=True) @mock.patch.object(test_runner, @@ -130,7 +161,7 @@ def test_specify_ash_chrome_version(self, mock_popen, mock_download, *_): @mock.patch.object(os, 'listdir', - return_value=['wayland-0', 'wayland-0.lock']) + return_value=['wayland-exo', 'wayland-exo.lock']) @mock.patch.object(os.path, 'exists', return_value=True) @mock.patch.object(os.path, 'isfile', return_value=True) @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary') @@ -151,7 +182,7 @@ def test_overrides_do_not_require_ash_chrome(self, mock_popen, mock_download, @mock.patch.object(os, 'listdir', - return_value=['wayland-0', 'wayland-0.lock']) + return_value=['wayland-exo', 'wayland-exo.lock']) @mock.patch.object(os.path, 'exists', return_value=True) @mock.patch.object(os.path, 'isfile', return_value=True) @mock.patch.object(test_runner, '_GetLatestVersionOfAshChrome') @@ -166,7 +197,7 @@ def test_specify_ash_chrome_path(self, mock_popen, mock_download, 'test', 'browser_tests', '--ash-chrome-path', - '/ash/chrome', + '/ash/test_ash_chrome', ] with mock.patch.object(sys, 'argv', args): test_runner.Main() @@ -188,6 +219,82 @@ def test_command_arguments(self, mock_popen, mock_download, _): ['./url_unittests', 
'--gtest_filter=Suite.Test']) self.assertFalse(mock_download.called) + @mock.patch.dict(os.environ, {'ASH_WRAPPER': 'gdb --args'}, clear=False) + @mock.patch.object(os, + 'listdir', + return_value=['wayland-exo', 'wayland-exo.lock']) + @mock.patch.object(tempfile, + 'mkdtemp', + side_effect=['/tmp/xdg', '/tmp/ash-data']) + @mock.patch.object(os.environ, 'copy', side_effect=[{}, {}]) + @mock.patch.object(os.path, 'exists', return_value=True) + @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(test_runner, + '_GetLatestVersionOfAshChrome', + return_value='793554') + @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary') + @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock()) + # Tests that, when the ASH_WRAPPER environment variable is set, it forwards + # the commands to the invocation of ash. + def test_ash_wrapper(self, mock_popen, *_): + args = [ + 'script_name', 'test', './browser_tests', '--gtest_filter=Suite.Test' + ] + with mock.patch.object(sys, 'argv', args): + test_runner.Main() + ash_args = mock_popen.call_args_list[0][0][0] + self.assertTrue(ash_args[2].endswith('test_ash_chrome')) + self.assertEqual(['gdb', '--args'], ash_args[:2]) + + + # Test when ash is newer, test runner skips running tests and returns 0. + @mock.patch.object(os.path, 'exists', return_value=True) + @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(test_runner, '_FindLacrosMajorVersion', return_value=91) + def test_version_skew_ash_newer(self, *_): + args = [ + 'script_name', 'test', './browser_tests', '--gtest_filter=Suite.Test', + '--ash-chrome-path-override=\ +lacros_version_skew_tests_v92.0.100.0/test_ash_chrome' + ] + with mock.patch.object(sys, 'argv', args): + self.assertEqual(test_runner.Main(), 0) + + @mock.patch.object(os.path, 'exists', return_value=True) + def test_lacros_version_from_chrome_version(self, *_): + version_data = '''\ +MAJOR=95 +MINOR=0 +BUILD=4615 +PATCH=0\ +''' + open_lib = '__builtin__.open' + if sys.version_info[0] >= 3: + open_lib = 'builtins.open' + with mock.patch(open_lib, + mock.mock_open(read_data=version_data)) as mock_file: + version = test_runner._FindLacrosMajorVersion() + self.assertEqual(95, version) + + @mock.patch.object(os.path, 'exists', return_value=True) + def test_lacros_version_from_metadata(self, *_): + metadata_json = ''' +{ + "content": { + "version": "92.1.4389.2" + }, + "metadata_version": 1 +} + ''' + open_lib = '__builtin__.open' + if sys.version_info[0] >= 3: + open_lib = 'builtins.open' + with mock.patch(open_lib, + mock.mock_open(read_data=metadata_json)) as mock_file: + version = test_runner._FindLacrosMajorVersionFromMetadata() + self.assertEqual(92, version) + mock_file.assert_called_with('metadata.json', 'r') + if __name__ == '__main__': unittest.main() diff --git a/build/landmine_utils.py b/build/landmine_utils.py index a3f21ff1b836..b126f4fff0fa 100644 --- a/build/landmine_utils.py +++ b/build/landmine_utils.py @@ -1,4 +1,4 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/landmines.py b/build/landmines.py index f5adf80f92e9..844ee3808c60 100755 --- a/build/landmines.py +++ b/build/landmines.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright (c) 2012 The Chromium Authors. All rights reserved. 
+#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/BUILD.gn b/build/linux/BUILD.gn index 54314c768784..b298abb709b2 100644 --- a/build/linux/BUILD.gn +++ b/build/linux/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright (c) 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -17,12 +17,13 @@ if (use_gio) { # Looking for libspeechd? Use //third_party/speech-dispatcher if (use_system_freetype) { - assert(!is_chromecast) + assert(!is_castos) # Only provided for distributions which prefer to keep linking to FreeType on # the system, use with caution,for details see build/config/freetype/BUILD.gn. pkg_config("freetype_from_pkgconfig") { visibility = [ + "//build/config/freetype:freetype", "//third_party:freetype_harfbuzz", "//third_party/harfbuzz-ng:harfbuzz_source", ] diff --git a/build/linux/chrome.map b/build/linux/chrome.map index 718796f8c456..3038318821dd 100644 --- a/build/linux/chrome.map +++ b/build/linux/chrome.map @@ -22,6 +22,7 @@ global: # Memory allocation symbols. We want chrome and any libraries to # share the same heap, so it is correct to export these symbols. + aligned_alloc; calloc; cfree; free; @@ -82,6 +83,13 @@ global: localtime64_r; localtime_r; + # getaddrinfo() is exported by the sandbox to ensure the network service and + # other sandboxed processes don't try to run system DNS resolution + # in-process, which is not supported by the sandbox. This override + # uses dlsym(getaddrinfo) to make the real calls in unsandboxed + # processes. + getaddrinfo; + v8dbg_*; local: diff --git a/build/linux/dump_app_syms.py b/build/linux/dump_app_syms.py index f156baf3b92c..ca2d700e272a 100644 --- a/build/linux/dump_app_syms.py +++ b/build/linux/dump_app_syms.py @@ -1,11 +1,10 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # Helper script to run dump_syms on Chrome Linux executables and strip # them if needed. -from __future__ import print_function import os import subprocess @@ -25,7 +24,7 @@ if not os.path.isfile(outfile) or \ os.stat(outfile).st_mtime < os.stat(infile).st_mtime: with open(outfile, 'w') as outfileobj: - subprocess.check_call([dumpsyms, infile], stdout=outfileobj) + subprocess.check_call([dumpsyms, '-m', '-d', infile], stdout=outfileobj) if strip_binary != '0': subprocess.check_call(['strip', infile]) diff --git a/build/linux/extract_symbols.gni b/build/linux/extract_symbols.gni index 722f60d23b72..8fef1312d825 100644 --- a/build/linux/extract_symbols.gni +++ b/build/linux/extract_symbols.gni @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -22,6 +22,7 @@ template("extract_symbols") { dump_syms_binary = get_label_info(dump_syms_label, "root_out_dir") + "/" + "dump_syms" + pool = "//build/toolchain:link_pool($default_toolchain)" script = "//build/linux/dump_app_syms.py" inputs = [ invoker.binary, diff --git a/build/linux/install-chromeos-fonts.py b/build/linux/install-chromeos-fonts.py index da8fb40f48d5..8ac242389f3c 100755 --- a/build/linux/install-chromeos-fonts.py +++ b/build/linux/install-chromeos-fonts.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2013 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,7 +7,6 @@ # This script can be run manually (as root), but is also run as part # install-build-deps.sh. -from __future__ import print_function import os import shutil diff --git a/build/linux/libpci/BUILD.gn b/build/linux/libpci/BUILD.gn index 2d1e267e1264..a6abfdef0511 100644 --- a/build/linux/libpci/BUILD.gn +++ b/build/linux/libpci/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/libudev/BUILD.gn b/build/linux/libudev/BUILD.gn index dcd9f234dbe4..312b092d8485 100644 --- a/build/linux/libudev/BUILD.gn +++ b/build/linux/libudev/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -10,6 +10,7 @@ libudev_functions = [ "udev_device_get_devtype", "udev_device_get_parent", "udev_device_get_parent_with_subsystem_devtype", + "udev_device_get_properties_list_entry", "udev_device_get_property_value", "udev_device_get_subsystem", "udev_device_get_sysattr_value", diff --git a/build/linux/rewrite_dirs.py b/build/linux/rewrite_dirs.py index 17659c3d3cae..d94ef53f9dca 100755 --- a/build/linux/rewrite_dirs.py +++ b/build/linux/rewrite_dirs.py @@ -1,11 +1,10 @@ -#!/usr/bin/env python -# Copyright (c) 2011 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2011 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Rewrites paths in -I, -L and other option to be relative to a sysroot.""" -from __future__ import print_function import sys import os diff --git a/build/linux/strip_binary.gni b/build/linux/strip_binary.gni index ddc42cc4e08a..3675d39a5481 100644 --- a/build/linux/strip_binary.gni +++ b/build/linux/strip_binary.gni @@ -1,4 +1,4 @@ -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -22,21 +22,33 @@ template("strip_binary") { action("${target_name}") { eu_strip_binary = "//buildtools/third_party/eu-strip/bin/eu-strip" script = "//build/linux/strip_binary.py" + + if (defined(invoker.stripped_binary_output)) { + stripped_binary_output = invoker.stripped_binary_output + } else { + stripped_binary_output = invoker.binary_input + ".stripped" + } + if (defined(invoker.symbol_output)) { + symbol_output = invoker.symbol_output + } else { + symbol_output = invoker.binary_input + ".debug" + } + inputs = [ invoker.binary_input, eu_strip_binary, ] outputs = [ - invoker.symbol_output, - invoker.stripped_binary_output, + symbol_output, + stripped_binary_output, ] args = [ "--eu-strip-binary-path", rebase_path(eu_strip_binary, root_build_dir), "--symbol-output", - rebase_path(invoker.symbol_output, root_build_dir), + rebase_path(symbol_output, root_build_dir), "--stripped-binary-output", - rebase_path(invoker.stripped_binary_output, root_build_dir), + rebase_path(stripped_binary_output, root_build_dir), "--binary-input", rebase_path(invoker.binary_input, root_build_dir), ] diff --git a/build/linux/strip_binary.py b/build/linux/strip_binary.py index 00b4089e769f..82801c7486a3 100755 --- a/build/linux/strip_binary.py +++ b/build/linux/strip_binary.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -24,7 +24,7 @@ def main(): ] process = subprocess.Popen(cmd_line) - + process.wait() return process.returncode diff --git a/build/linux/sysroot_ld_path.sh b/build/linux/sysroot_ld_path.sh deleted file mode 100755 index 623d47b86154..000000000000 --- a/build/linux/sysroot_ld_path.sh +++ /dev/null @@ -1,99 +0,0 @@ -#!/bin/sh -# Copyright (c) 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Reads etc/ld.so.conf and/or etc/ld.so.conf.d/*.conf and returns the -# appropriate linker flags. -# -# sysroot_ld_path.sh /abspath/to/sysroot -# - -log_error_and_exit() { - echo $0: $@ - exit 1 -} - -process_entry() { - if [ -z "$1" ] || [ -z "$2" ]; then - log_error_and_exit "bad arguments to process_entry()" - fi - local root="$1" - local localpath="$2" - - echo $localpath | grep -qs '^/' - if [ $? -ne 0 ]; then - log_error_and_exit $localpath does not start with / - fi - local entry="$root$localpath" - echo $entry -} - -process_ld_so_conf() { - if [ -z "$1" ] || [ -z "$2" ]; then - log_error_and_exit "bad arguments to process_ld_so_conf()" - fi - local root="$1" - local ld_so_conf="$2" - - # ld.so.conf may include relative include paths. pushd is a bashism. - local saved_pwd=$(pwd) - cd $(dirname "$ld_so_conf") - - cat "$ld_so_conf" | \ - while read ENTRY; do - echo "$ENTRY" | grep -qs ^include - if [ $? -eq 0 ]; then - local included_files=$(echo "$ENTRY" | sed 's/^include //') - echo "$included_files" | grep -qs ^/ - if [ $? -eq 0 ]; then - if ls $root$included_files >/dev/null 2>&1 ; then - for inc_file in $root$included_files; do - process_ld_so_conf "$root" "$inc_file" - done - fi - else - if ls $(pwd)/$included_files >/dev/null 2>&1 ; then - for inc_file in $(pwd)/$included_files; do - process_ld_so_conf "$root" "$inc_file" - done - fi - fi - continue - fi - - echo "$ENTRY" | grep -qs ^/ - if [ $? 
-eq 0 ]; then - process_entry "$root" "$ENTRY" - fi - done - - # popd is a bashism - cd "$saved_pwd" -} - -# Main - -if [ $# -ne 1 ]; then - echo Usage $0 /abspath/to/sysroot - exit 1 -fi - -echo $1 | grep -qs ' ' -if [ $? -eq 0 ]; then - log_error_and_exit $1 contains whitespace. -fi - -LD_SO_CONF="$1/etc/ld.so.conf" -LD_SO_CONF_D="$1/etc/ld.so.conf.d" - -if [ -e "$LD_SO_CONF" ]; then - process_ld_so_conf "$1" "$LD_SO_CONF" | xargs echo -elif [ -e "$LD_SO_CONF_D" ]; then - find "$LD_SO_CONF_D" -maxdepth 1 -name '*.conf' -print -quit > /dev/null - if [ $? -eq 0 ]; then - for entry in $LD_SO_CONF_D/*.conf; do - process_ld_so_conf "$1" "$entry" - done | xargs echo - fi -fi diff --git a/build/linux/sysroot_scripts/build_and_upload.py b/build/linux/sysroot_scripts/build_and_upload.py index 1a24da29066e..d7d95e95b2c7 100755 --- a/build/linux/sysroot_scripts/build_and_upload.py +++ b/build/linux/sysroot_scripts/build_and_upload.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2016 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,7 +7,6 @@ UploadSysroot for each supported arch of each sysroot creator. """ -from __future__ import print_function import glob import hashlib @@ -37,10 +36,10 @@ def sha1sumfile(filename): def get_proc_output(args): - return subprocess.check_output(args).strip() + return subprocess.check_output(args, encoding='utf-8').strip() -def build_and_upload(script_path, distro, release, arch, lock): +def build_and_upload(script_path, distro, release, key, arch, lock): script_dir = os.path.dirname(os.path.realpath(__file__)) run_script([script_path, 'BuildSysroot' + arch]) @@ -56,13 +55,13 @@ def build_and_upload(script_path, distro, release, arch, lock): 'Tarball': tarball, 'Sha1Sum': sha1sum, 'SysrootDir': sysroot_dir, + 'Key': key, } with lock: - with open(os.path.join(script_dir, 'sysroots.json'), 'rw+') as f: - sysroots = json.load(f) + fname = os.path.join(script_dir, 'sysroots.json') + sysroots = json.load(open(fname)) + with open(fname, 'w') as f: sysroots["%s_%s" % (release, arch.lower())] = sysroot_metadata - f.seek(0) - f.truncate() f.write( json.dumps( sysroots, sort_keys=True, indent=4, separators=(',', ': '))) @@ -77,11 +76,12 @@ def main(): script_path = os.path.join(script_dir, filename) distro = get_proc_output([script_path, 'PrintDistro']) release = get_proc_output([script_path, 'PrintRelease']) + key = get_proc_output([script_path, 'PrintKey']) architectures = get_proc_output([script_path, 'PrintArchitectures']) for arch in architectures.split('\n'): - proc = multiprocessing.Process( - target=build_and_upload, - args=(script_path, distro, release, arch, lock)) + proc = multiprocessing.Process(target=build_and_upload, + args=(script_path, distro, release, key, + arch, lock)) procs.append(("%s %s (%s)" % (distro, release, arch), proc)) proc.start() for _, proc in procs: diff --git a/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py b/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py deleted file mode 100755 index 1e0b5f614a35..000000000000 --- a/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Find incompatible symbols in glibc and output a list of replacements. -""" - -from __future__ import print_function - -import re -import sys - -# This constant comes from https://crbug.com/580892 -MAX_ALLOWED_GLIBC_VERSION = [2, 17] - - -def get_replacements(nm_file, max_allowed_glibc_version): - version_format = re.compile('GLIBC_[0-9\.]+') - symbols = {} - for line in nm_file: - # Some versions of nm have a bug where the version gets printed twice. - # Since the symbol may either be formatted like "name@@VERSION" or - # "name@@VERSION@@VERSION", handle both cases. - line = line.replace('@@', '@') - symver = line.split('@') - symbol = symver[0].split(' ')[-1] - version = symver[-1] - if not re.match(version_format, version): - continue - if symbol in symbols: - symbols[symbol].add(version) - else: - symbols[symbol] = set([version]) - - replacements = [] - for symbol, versions in symbols.items(): - if len(versions) <= 1: - continue - versions_parsed = [[ - int(part) for part in version.lstrip('GLIBC_').split('.') - ] for version in versions] - if (max(versions_parsed) > max_allowed_glibc_version and - min(versions_parsed) <= max_allowed_glibc_version): - # Use the newest allowed version of the symbol. - replacement_version_parsed = max([ - version for version in versions_parsed - if version <= max_allowed_glibc_version - ]) - replacement_version = 'GLIBC_' + '.'.join( - [str(part) for part in replacement_version_parsed]) - replacements.append('__asm__(".symver %s, %s@%s");' % - (symbol, symbol, replacement_version)) - return sorted(replacements) - - -if __name__ == '__main__': - replacements = get_replacements(sys.stdin, MAX_ALLOWED_GLIBC_VERSION) - if replacements: - print('// Chromium-specific hack.') - print('// See explanation in sysroot-creator.sh.') - for replacement in replacements: - print(replacement) diff --git a/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py b/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py deleted file mode 100755 index 7c665550684d..000000000000 --- a/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python -# Copyright 2018 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import find_incompatible_glibc_symbols -import sys - -if sys.version_info.major == 2: - from cStringIO import StringIO -else: - from io import StringIO - -NM_DATA = """\ -0000000000000001 W expf@GLIBC_2.2.5 -0000000000000002 W expf@@GLIBC_2.27 -0000000000000003 W foo@@GLIBC_2.2.5 -0000000000000004 W bar@GLIBC_2.2.5 -0000000000000005 W baz@GLIBC_2.2.5 -0000000000000006 T foo2@GLIBC_2.2 -0000000000000007 T foo2@GLIBC_2.3 -0000000000000008 T foo2@GLIBC_2.30 -0000000000000009 T foo2@@GLIBC_2.31 -000000000000000a T bar2@GLIBC_2.30 -000000000000000b T bar2@@GLIBC_2.31 -000000000000000c T baz2@GLIBC_2.2 -000000000000000d T baz2@@GLIBC_2.3 -""" - -EXPECTED_REPLACEMENTS = [ - '__asm__(".symver expf, expf@GLIBC_2.2.5");', - '__asm__(".symver foo2, foo2@GLIBC_2.3");', -] - -nm_file = StringIO() -nm_file.write(NM_DATA) -nm_file.seek(0) - -assert ( - EXPECTED_REPLACEMENTS == find_incompatible_glibc_symbols.get_replacements( - nm_file, [2, 17])) diff --git a/build/linux/sysroot_scripts/generate_debian_archive_unstable_gpg.sh b/build/linux/sysroot_scripts/generate_keyring.sh similarity index 71% rename from build/linux/sysroot_scripts/generate_debian_archive_unstable_gpg.sh rename to build/linux/sysroot_scripts/generate_keyring.sh index ecbdd6431268..7b17730008da 100755 --- a/build/linux/sysroot_scripts/generate_debian_archive_unstable_gpg.sh +++ b/build/linux/sysroot_scripts/generate_keyring.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,14 +9,18 @@ set -o errexit SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" KEYS=( + # Debian Archive Automatic Signing Key (11/bullseye) + "73A4F27B8DD47936" + # Debian Security Archive Automatic Signing Key (11/bullseye) + "A48449044AAD5C5D" + # Debian Stable Release Key (11/bullseye) + "605C66F00D6C9793" # Debian Stable Release Key (10/buster) "DCC9EFBF77E11517" # Debian Archive Automatic Signing Key (10/buster) "DC30D7C23CBBABEE" # Debian Security Archive Automatic Signing Key (10/buster) "4DFAB270CAA96DFA" - # Debian Archive Automatic Signing Key (10/buster) - "DC30D7C23CBBABEE" # Jessie Stable Release Key "CBF8D6FD518E17E1" # Debian Archive Automatic Signing Key (7.0/wheezy) @@ -33,5 +37,5 @@ KEYS=( "EF0F382A1A7B6500" ) -gpg --recv-keys ${KEYS[@]} -gpg --output "${SCRIPT_DIR}/debian_archive_unstable.gpg" --export ${KEYS[@]} +gpg --keyserver keyserver.ubuntu.com --recv-keys ${KEYS[@]} +gpg --output "${SCRIPT_DIR}/keyring.gpg" --export ${KEYS[@]} diff --git a/build/linux/sysroot_scripts/generated_package_lists/bullseye.amd64 b/build/linux/sysroot_scripts/generated_package_lists/bullseye.amd64 new file mode 100644 index 000000000000..f66b7aff6f67 --- /dev/null +++ b/build/linux/sysroot_scripts/generated_package_lists/bullseye.amd64 @@ -0,0 +1,411 @@ +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libasan6_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libitm1_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/liblsan0_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libquadmath0_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libtsan0_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-intel1_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/valgrind/valgrind_3.16.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_amd64.deb
diff --git a/build/linux/sysroot_scripts/generated_package_lists/bullseye.arm b/build/linux/sysroot_scripts/generated_package_lists/bullseye.arm
new file mode 100644
index 000000000000..09b91d6e299d
--- /dev/null
+++ b/build/linux/sysroot_scripts/generated_package_lists/bullseye.arm
@@ -0,0 +1,411 @@
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libasan6_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-exynos1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-omap1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_armhf.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/valgrind/valgrind_3.16.1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_armhf.deb diff --git a/build/linux/sysroot_scripts/generated_package_lists/bullseye.arm64 b/build/linux/sysroot_scripts/generated_package_lists/bullseye.arm64 new file mode 100644 index 000000000000..127cf3ff1885 --- /dev/null +++ b/build/linux/sysroot_scripts/generated_package_lists/bullseye.arm64 @@ -0,0 +1,414 @@ +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libasan6_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libitm1_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/liblsan0_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libtsan0_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/valgrind/valgrind_3.16.1-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_arm64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_arm64.deb
diff --git a/build/linux/sysroot_scripts/generated_package_lists/bullseye.armel b/build/linux/sysroot_scripts/generated_package_lists/bullseye.armel
new file mode 100644
index 000000000000..50e7e1371925
--- /dev/null
+++ b/build/linux/sysroot_scripts/generated_package_lists/bullseye.armel
@@ -0,0 +1,409 @@
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libasan6_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-exynos1_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-omap1_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_armel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_armel.deb diff --git a/build/linux/sysroot_scripts/generated_package_lists/bullseye.i386 b/build/linux/sysroot_scripts/generated_package_lists/bullseye.i386 new file mode 100644 index 000000000000..220dd7db9729 --- /dev/null +++ b/build/linux/sysroot_scripts/generated_package_lists/bullseye.i386 @@ -0,0 +1,409 @@ +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libasan6_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libitm1_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libquadmath0_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-intel1_2.4.104-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/valgrind/valgrind_3.16.1-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_i386.deb diff --git a/build/linux/sysroot_scripts/generated_package_lists/bullseye.mips64el b/build/linux/sysroot_scripts/generated_package_lists/bullseye.mips64el new file mode 100644 index 000000000000..de6da06f9c31 --- /dev/null +++ b/build/linux/sysroot_scripts/generated_package_lists/bullseye.mips64el @@ -0,0 +1,404 @@ +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_5.19.11-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/valgrind/valgrind_3.16.1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_mips64el.deb diff --git a/build/linux/sysroot_scripts/generated_package_lists/bullseye.mipsel b/build/linux/sysroot_scripts/generated_package_lists/bullseye.mipsel new file mode 100644 index 000000000000..21322b23c7ad --- /dev/null +++ b/build/linux/sysroot_scripts/generated_package_lists/bullseye.mipsel @@ -0,0 +1,403 @@ +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_mipsel.deb
diff --git a/build/linux/sysroot_scripts/generated_package_lists/sid.amd64 b/build/linux/sysroot_scripts/generated_package_lists/sid.amd64
deleted file mode 100644
index 29ea13c4dcb9..000000000000
--- a/build/linux/sysroot_scripts/generated_package_lists/sid.amd64
+++ /dev/null
@@ -1,372 +0,0 @@
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libasan6_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libitm1_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/liblsan0_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libquadmath0_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libtsan0_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libcilkrts5_6.3.0-18+deb9u1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libmpx2_6.3.0-18+deb9u1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-intel1_2.4.104-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_amd64.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_amd64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_amd64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_amd64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_amd64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_amd64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_amd64.deb diff --git a/build/linux/sysroot_scripts/generated_package_lists/sid.arm b/build/linux/sysroot_scripts/generated_package_lists/sid.arm deleted file mode 100644 index b91ebc48a8a7..000000000000 --- a/build/linux/sysroot_scripts/generated_package_lists/sid.arm +++ /dev/null @@ -1,368 +0,0 @@ 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libasan6_10.2.1-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.104-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-exynos1_2.4.104-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-omap1_2.4.104-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_armhf.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_armhf.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_armhf.deb diff --git a/build/linux/sysroot_scripts/generated_package_lists/sid.arm64 b/build/linux/sysroot_scripts/generated_package_lists/sid.arm64 deleted file mode 100644 index 4db2b8a99440..000000000000 --- a/build/linux/sysroot_scripts/generated_package_lists/sid.arm64 +++ /dev/null @@ -1,371 +0,0 @@ -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libasan6_10.2.1-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libitm1_10.2.1-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/liblsan0_10.2.1-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libtsan0_10.2.1-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.104-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_arm64.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_arm64.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_arm64.deb diff --git a/build/linux/sysroot_scripts/generated_package_lists/sid.armel b/build/linux/sysroot_scripts/generated_package_lists/sid.armel deleted file mode 100644 index f8f54f343dcd..000000000000 --- a/build/linux/sysroot_scripts/generated_package_lists/sid.armel +++ /dev/null @@ -1,367 +0,0 @@ -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_armel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_armel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libasan6_10.2.1-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_armel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_armel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-exynos1_2.4.104-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-omap1_2.4.104-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_armel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_armel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_armel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_armel.deb
diff --git a/build/linux/sysroot_scripts/generated_package_lists/sid.i386 b/build/linux/sysroot_scripts/generated_package_lists/sid.i386
deleted file mode 100644
index cdeaf9a2b780..000000000000
--- a/build/linux/sysroot_scripts/generated_package_lists/sid.i386
+++ /dev/null
@@ -1,368 +0,0 @@
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libasan6_10.2.1-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libitm1_10.2.1-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libquadmath0_10.2.1-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libcilkrts5_6.3.0-18+deb9u1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libmpx2_6.3.0-18+deb9u1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-intel1_2.4.104-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_i386.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_i386.deb
diff --git a/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el b/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el
deleted file mode 100644
index a3b6559aa287..000000000000
--- a/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el
+++ /dev/null
@@ -1,359 +0,0 @@
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_mips64el.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_mips64el.deb
diff --git a/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel b/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel
deleted file mode 100644
index 0aa38490bd36..000000000000
--- a/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel
+++ /dev/null
@@ -1,359 +0,0 @@
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_mipsel.deb
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_mipsel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_mipsel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_mipsel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_mipsel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_mipsel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mipsel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_mipsel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_mipsel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_mipsel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_mipsel.deb 
-https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_mipsel.deb -https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_mipsel.deb diff --git a/build/linux/sysroot_scripts/install-sysroot.py b/build/linux/sysroot_scripts/install-sysroot.py index f8b7906cc557..42842a184deb 100755 --- a/build/linux/sysroot_scripts/install-sysroot.py +++ b/build/linux/sysroot_scripts/install-sysroot.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -18,7 +18,10 @@ # time chrome's build dependencies are changed but should also be updated # periodically to include upstream security fixes from Debian. -from __future__ import print_function +# This script looks at sysroots.json next to it to find the name of a .tar.xz +# to download and the location to extract it to. The extracted sysroot could for +# example be in build/linux/debian_bullseye_amd64-sysroot/. + import hashlib import json @@ -30,11 +33,11 @@ import subprocess import sys try: - # For Python 3.0 and later - from urllib.request import urlopen + # For Python 3.0 and later + from urllib.request import urlopen except ImportError: - # Fall back to Python 2's urllib2 - from urllib2 import urlopen + # Fall back to Python 2's urllib2 + from urllib2 import urlopen SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -50,7 +53,8 @@ 'mips64': 'mips64el', } -DEFAULT_TARGET_PLATFORM = 'sid' +DEFAULT_TARGET_PLATFORM = 'bullseye' + class Error(Exception): pass @@ -75,14 +79,15 @@ def main(args): parser.add_option('--all', action='store_true', help='Install all sysroot images (useful when updating the' ' images)') - parser.add_option('--print-hash', + parser.add_option('--print-key', help='Print the hash of the sysroot for the given arch.') options, _ = parser.parse_args(args) - if options.print_hash: - arch = options.print_hash - print(GetSysrootDict(DEFAULT_TARGET_PLATFORM, - ARCH_TRANSLATIONS.get(arch, arch))['Sha1Sum']) + if options.print_key: + arch = options.print_key + print( + GetSysrootDict(DEFAULT_TARGET_PLATFORM, + ARCH_TRANSLATIONS.get(arch, arch))['Key']) return 0 if options.arch: InstallSysroot(DEFAULT_TARGET_PLATFORM, @@ -150,7 +155,7 @@ def InstallSysroot(target_platform, target_arch): if sha1sum != tarball_sha1sum: raise Error('Tarball sha1sum is wrong.' 
                 'Expected %s, actual: %s' % (tarball_sha1sum, sha1sum))
-  subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
+  subprocess.check_call(['tar', 'mxf', tarball, '-C', sysroot])
   os.remove(tarball)
 
   with open(stamp, 'w') as s:
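As a quick reference for the hunk above: the sysroot tarball is rejected unless its SHA-1 matches the value recorded in sysroots.json. A minimal standalone sketch of that check follows; the tarball path and expected digest here are hypothetical placeholders (the real script reads both from sysroots.json).

import hashlib

def sha1_of(path, chunk_size=1 << 16):
  # Hash incrementally so a multi-hundred-megabyte sysroot tarball never
  # has to fit in memory at once.
  digest = hashlib.sha1()
  with open(path, 'rb') as f:
    while chunk := f.read(chunk_size):
      digest.update(chunk)
  return digest.hexdigest()

# Hypothetical values for illustration only.
tarball = '/tmp/debian_bullseye_amd64_sysroot.tar.xz'
expected_sha1 = 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
actual_sha1 = sha1_of(tarball)
if actual_sha1 != expected_sha1:
  raise RuntimeError('Tarball sha1sum is wrong. Expected %s, actual: %s' %
                     (expected_sha1, actual_sha1))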
diff --git a/build/linux/sysroot_scripts/debian_archive_unstable.gpg b/build/linux/sysroot_scripts/keyring.gpg
similarity index 86%
rename from build/linux/sysroot_scripts/debian_archive_unstable.gpg
rename to build/linux/sysroot_scripts/keyring.gpg
index e86a7007a80131af714c63c8783549e0bd83421d..81e2624c5f3845a1642d9379bf47ed2221a291a4 100644
GIT binary patch
delta 12488
[base85-encoded binary delta omitted]
z(U`(Ad6#io5yR%ZMkAf_ zrBr~GGlqMPGnlIH21_IOw@Y;9UKmq6dK7*W-7+7!oSTFq-$lv=rnr%P`xjk)WpF9Y z!JCr?DZ~EPmi@n@^WXjO-wc_*AOj+ro{r&TzXSJKV|3OJhoJ;p**1IHYu;wV;wmIm zN~jALFV$)4f9WPX$q!FMO|h9pyY@1yL$L|3bj1p?DTMPV>kfTtU9UsmJ<$c_`P1`f z*+YgRI?pE0oOHk*s@6c)*cw_JyHd6BU{z@A0$iH~M?l}2n?~iRLnynXLmAexwGQLY zsryWN!0-edCYOj2v*n+#dDmliikEHFG>{vz`k>1>@ z;uZ~37-9z5I2UW6X8isTCP9G}aVbNL>*~#pCmV3}w^51?sKwdH5k6d%Zo@ovoNU)# zh>?J^Jb?yBFY>kv)6nU)Pn32reUrzAW}kr}MN}(WAxi1|p;-)RJY5rdri&d41A6GB z2}agN(z#;@vS6?T(^V;jZ*7q2*D~Pc+a|n8kVs|a?TDe55o(IXCk!k{B6=AWeBJA9ur;2KHC5z4Xs z>M~zaQf_PScO`wwd2$GkOYU!YT(9eK2{CRe;V~<0Hf-}k<6-;r)g8V~os{x;tzlnz9-taLLa^6?hNc_5hr;}HGCmxj2T|v^gWt~h( zm1h;Eq)yM3Y3aGR^TLO$6`34`e&GJB+tbTs4u;FWr)3F4WZx;BOE8o*!JjOb3&WF; zt@(xN@=xyg79KA`9sY>hW%_K3u-#7cJ3)_ZCkK2)r!M ztqki~W_ZuQu8t9x5e?*($vB1FtRGvZIM=v#`p~23=pYOu;cRLxJ-wshJ<28K%PSK| zupH{u{tDW2xM}pZI^`5K6)IZ5-#HWz(+AV!0KwzFwyc&Po`pl}Zk1(gv(dp69b#pV zJ@E!uq__YVi7zvm)Jddnko7sE(-$~=MSr+%JxuWsRH+?^lz_|z-`ag4fF5xvVsybB z190$OGW%nYw8nUzBU(=k&udt;y>W`P#Z;s)4R}P>}Wu-Eo{k( zx?z*={een|hvV4W%!z{hJrLmut{G6#f_n4sq4Q5a{5LwBy}%C!&h&|p9osrBjQuMW zpIGCC{buS1WHEmS_kF8ClCPi>rk=}V*lU`CpI8Zd1Ih9)xas;wk(Trp%WTOWPGQAu zV<2aajW47&Wu;^zdjP9zWBh{fjNIMtx*K?Ci6|?oTVM7P>p7%tHkO!%kYETu;x9q* zU*~+|e_tRYHJax=riB)T`zT*xgiWsH?}k@}zEsD^%<0IbFDb&ar*Ec+X^O8foDVy* z51X1Dk_mL7OWkmFS`M-qQOsbsA-OFKl)q2;aYrjkh&B!~fh4Ei?~lzSwMvoq8+mlG zH=GwDTFvKq^A?$Q9Vt^d+fz#kQUgGH8#VI@hekYP6q!b7dvXoSGhb(#{Mk1Z$RWa=!0}CjZ1H&A~ zR@bC@wDy$W0$=;5Fr*d6Nenp!w-lnXCaeF~#vY1Bp;t->&BIXm4kp&GDC(l;C$m&7 zL%pOdlJemgXJsoPa$k!D{fHT$-=P3rte8&de4@w5UBAc}fQb#hc9B9NdWjV^BGDswUnPvITU(sj;YPLPY$zAkw8L@HhgJM9+Cvky!3M8gZt_y z>E|gnA-fDqEnl28`799m_sAQaY5SCOIM7T=tGUU3Zb5H$iqm-S<=f436m(;=DpQbQ zE*GI(irVkp@*9E=4QKS6OBwv3XSa)9V@e5{CIlbQ-St+*tFz4_fr8>pe;no2-I_)} zNf#FQpS5)HOS(JVycKmVD7x41W@t_x1G^6qzZ9QLL59f}e)BrnydrrwbIa)Fa^j6{ zLKb8bsjsrTXvVfimFlSM<90hW^n{nqQ1byE1i^xlY7H z`^U~fOI(9i2sKZz&Ut^x71ZY4Td0fW0mVUD-7iEDEUlu9n+0srC+bi}xMRd}XUd^6 zoj~$}-EPv6a+W5X8&1nUtljKBW(fDi(Tpo{da#UH_J>8b@ZzBNwA^IsBAl_zO7uOG z?E`vX^yA)|I0lDW>0SQ>nYtpyf}qNY8&Iv8KqIpk@2A?r|{E zipT;7vs7Ph!X1}}mGuUAUU51q*%Hx|z5Zr_L3wI%sUoN?y#6AmtugB@a3H9Zgdf>X z;Ou=PF#ks&gm4OU5J=GSXE1x9IKT-8C;72Q%xqBwxQMJIEL}H`H&Z%z9Y1RyA|GKs z4dN`OaozZ$XZtjPNOfh9C4nlxwQgRh!@1yjPnPH#Qt%o<7Rj+YcXq)!hBXh|io(Aj zLA2V7Gt9kXZMV+>&R`iYvVfa9O0wjmd{81_{v5B;3LAFeWH|uBEXRdbE|iS zh-@yW7}4?jE_FtRr+pUsEMDZaP|I1??!a=@Ccb@WN6NI8KlPQ@E*yRCR*$d>+R}O7 zVfzVogSjlR>JDwNIeQO6zG`QzV)93y6vsesaLGmQ@ z5pOx$>kum9pMPl>q<{J0f41=sd%O_S{U+M@fiN`LwXS`!g!%_I)tu#Mqsws&Vm3`q zYErw;HZkH<0@_#i%pLu}Ptb&AT0xz?%rVJXj}N&?bwYpH)Le~W$jERdmpjGl7%625 zahqH6>8O#4$`dt2mVGto8KS&qXL6L|$B@oJY*Rmlq~oTv4E&na@t;H0>UGV}d$A<* zxh&f+U==4RlT$8TNFK%g&**ySdUDDBO6zM_<={#OYnE`bRNfJy$bdmH1rr96K-qev znRX#YPSPpI3*5U3qh%qm;sLW2%u^H%!{c~o4P>i1gkFz?OJ;sh_6~ZWT2ieJg(LN3 zg1Sl|r|y!vGSpi}0gBfh{-(N8PK)eyL1NpG;asIFJzqj&zD%F};*Qi~=dR8p<>0xDn$4ljeeofoZT-RlgFHYz=B0Y$uX(Ccj5D)@NYFF3&n(!704>fHP>1NeG z1mVQ6RvSFr2|&I!;FSFze)xZnlmF(2|BjXa>W7QT%!2MG#wyXns9wIL2+DHV&GvI;;3)c;1c{@~01>%F19s{z(k9BSDa4)>1WE{0pKIa~wvO4MT6efB02 zF70(6irLYsB=%8gY-u(3 zf{GPF}~q`rqK|LCIPh~sRj zAZ4sL#p@DImjeA!-^fl@17f zzGUeVF=I7*4o`|jRCjL3Z`Y@ zLsIt{uJ2|fnnm3k{%j+{x6)AZ_mcyBQ2kvhg7C=E3^!vvKe$8j zH^_qE10RIHVPgqXRRI%y?xin>KNw2z4Jn&dCW&qAN~G^G;7_*CidwV0Y%!y5i!+PR z(6olA|F^>*P7YYgO=UAYu=m2||zhdu(L`rD08ILloGMux+DYyvkE7lDgU^ z`t?pV{7nwxD~7{W>!1N^WXP4%$c21fBwh%othtJL$1h#O0l;iK@GKw*FUx9I_~cA|jb6&vmqrO5 z4Rcobxw7-2@{NKbmhR0F{Uv7_Zy_GT>Z-OGa0jGRLk 2 + + if version.startswith('XCRYPT_'): + # Prefer GLIBC_* versioned symbols over XCRYPT_* ones. 
+    # something > MAX_ALLOWED_GLIBC_VERSION so this symbol will not be picked.
+    version = [float('inf')]
+  else:
+    match = re.match(VERSION_PATTERN, version)
+    # Ignore symbols versioned with GLIBC_PRIVATE.
+    if not match:
+      continue
+    version = [int(part) for part in match.group(1).split('.')]
+
+  if version < MAX_ALLOWED_GLIBC_VERSION:
+    old_supported_version = supported_version.get(base_name, ([-1], -1))
+    supported_version[base_name] = max((version, index), old_supported_version)
+  if is_default:
+    default_version[base_name] = (version, index)
+
+# Get the offset into the binary of the .gnu.version section from readelf.
+stdout = subprocess.check_output(['readelf', '--sections', '--wide', BIN_FILE])
+for line in stdout.decode("utf-8").split('\n'):
+  if match := SECTION_PATTERN.match(line):
+    section_name, address = match.groups()
+    if section_name == '.gnu.version':
+      gnu_version_addr = int(address, base=16)
+      break
+else:
+  print('No .gnu.version section found', file=sys.stderr)
+  sys.exit(1)
+
+# Rewrite the binary.
+bin_data = bytearray(open(BIN_FILE, 'rb').read())
+for name, (version, index) in default_version.items():
+  # No need to rewrite the default if it's already an allowed version.
+  if version <= MAX_ALLOWED_GLIBC_VERSION:
+    continue
+
+  if name in SYMBOL_ALLOWLIST:
+    continue
+  elif name in supported_version:
+    _, supported_index = supported_version[name]
+  else:
+    supported_index = -1
+
+  # The .gnu.version section is divided into 16-bit chunks that give the
+  # symbol versions. The 16th bit is a flag that's false for the default
+  # version. The data is stored in little-endian so we need to add 1 to
+  # get the address of the byte we want to flip.
+  #
+  # Disable the unsupported symbol.
+  old_default = gnu_version_addr + 2 * index + 1
+  assert (bin_data[old_default] & 0x80) == 0
+  bin_data[old_default] ^= 0x80
+
+  # If we found a supported version, enable that as default.
+  if supported_index != -1:
+    new_default = gnu_version_addr + 2 * supported_index + 1
+    assert (bin_data[new_default] & 0x80) == 0x80
+    bin_data[new_default] ^= 0x80
+
+open(BIN_FILE, 'wb').write(bin_data)
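The byte arithmetic above is easiest to see on a toy .gnu.version table. The sketch below is illustrative only, with invented entry values and index: it packs three little-endian 16-bit versym entries and hides a too-new default version exactly the way the script does, by setting the 0x80 flag in the entry's high byte.

import struct

# Three fake 16-bit versym entries; the 0x8000 bit set means "hidden"
# (i.e. not the default version).
bin_data = bytearray(struct.pack('<3H', 0x0000, 0x0005, 0x8004))

index = 1                    # invented: entry holding a too-new default
old_default = 2 * index + 1  # high byte of the little-endian entry
assert (bin_data[old_default] & 0x80) == 0
bin_data[old_default] ^= 0x80  # entry 1 becomes 0x8005: no longer default

print(struct.unpack('<3H', bytes(bin_data)))  # (0, 32773, 32772)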
diff --git a/build/linux/sysroot_scripts/sysroot-creator-sid.sh b/build/linux/sysroot_scripts/sysroot-creator-bullseye.sh
similarity index 73%
rename from build/linux/sysroot_scripts/sysroot-creator-sid.sh
rename to build/linux/sysroot_scripts/sysroot-creator-bullseye.sh
index 86c311cb621b..3f40e809580d 100755
--- a/build/linux/sysroot_scripts/sysroot-creator-sid.sh
+++ b/build/linux/sysroot_scripts/sysroot-creator-bullseye.sh
@@ -1,31 +1,39 @@
 #!/bin/bash
-# Copyright 2017 The Chromium Authors. All rights reserved.
+# Copyright 2022 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 
 DISTRO=debian
-DIST=sid
+DIST=bullseye
 
-# Keep the "experimental" repo before the "sid" repo. There are some packages
-# that are currently only available in experimental like libgtk-4-1, but if it
-# were to be placed first, experimental (unreleased) versions of other packages
-# like libc6 would take precedence over the sid (released) versions. While this
-# may be useful for certain kinds of development, the standard sysroots should
-# continue to be shipped only with released packages.
-# Also keep "stretch" before "sid". For now, it's needed to bring back
-# libgnome-keyring-dev which has since been deprecated and removed from sid.
-# It will be needed until gnome keyring is removed (http://crbug.com/466975 and
-# http://crbug.com/355223).
-ARCHIVE_URL="https://snapshot.debian.org/archive/debian"
-ARCHIVE_TIMESTAMP=20210309T203820Z
-APT_SOURCES_LIST="${ARCHIVE_URL}/${ARCHIVE_TIMESTAMP}/ stretch main
-${ARCHIVE_URL}/${ARCHIVE_TIMESTAMP}/ experimental main
-${ARCHIVE_URL}/${ARCHIVE_TIMESTAMP}/ sid main"
+# This number is appended to the sysroot key to cause full rebuilds. It
+# should be incremented when removing packages or patching existing packages.
+# It should not be incremented when adding packages.
+SYSROOT_RELEASE=1
 
-# gpg keyring file generated using generate_debian_archive_unstable.sh
-KEYRING_FILE="${SCRIPT_DIR}/debian_archive_unstable.gpg"
+ARCHIVE_TIMESTAMP=20230329T085712Z
+ARCHIVE_URL="https://snapshot.debian.org/archive/debian/$ARCHIVE_TIMESTAMP/"
+APT_SOURCES_LIST=(
+  # Debian 12 (Bookworm) is needed for GTK4. It should be kept before bullseye
+  # so that bullseye takes precedence.
+  "${ARCHIVE_URL} bookworm main"
+  "${ARCHIVE_URL} bookworm-updates main"
+
+  # Debian 9 (Stretch) is needed for gnome-keyring. It should be kept before
+  # bullseye so that bullseye takes precedence.
+  "${ARCHIVE_URL} stretch main"
+  "${ARCHIVE_URL} stretch-updates main"
+
+  # This mimics a sources.list from bullseye.
+  "${ARCHIVE_URL} bullseye main contrib non-free"
+  "${ARCHIVE_URL} bullseye-updates main contrib non-free"
+  "${ARCHIVE_URL} bullseye-backports main contrib non-free"
+)
+
+# gpg keyring file generated using generate_keyring.sh
+KEYRING_FILE="${SCRIPT_DIR}/keyring.gpg"
 
 HAS_ARCH_AMD64=1
 HAS_ARCH_I386=1
@@ -36,15 +44,9 @@ HAS_ARCH_MIPS=1
 HAS_ARCH_MIPS64EL=1
 
 # Sysroot packages: these are the packages needed to build chrome.
-# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
-# by running this script in GeneratePackageList mode.
DEBIAN_PACKAGES="\ comerr-dev krb5-multidev - libappindicator-dev - libappindicator1 - libappindicator3-1 - libappindicator3-dev libasound2 libasound2-dev libasyncns0 @@ -59,6 +61,7 @@ DEBIAN_PACKAGES="\ libaudit1 libavahi-client3 libavahi-common3 + libb2-1 libblkid-dev libblkid1 libbluetooth-dev @@ -75,15 +78,18 @@ DEBIAN_PACKAGES="\ libcap-dev libcap-ng0 libcap2 + libcloudproviders0 libcolord2 libcom-err2 + libcrypt-dev + libcrypt1 libcups2 libcups2-dev libcupsimage2 libcupsimage2-dev - libdatrie-dev libcurl3-gnutls libcurl4-gnutls-dev + libdatrie-dev libdatrie1 libdb5.3 libdbus-1-3 @@ -93,7 +99,9 @@ DEBIAN_PACKAGES="\ libdbusmenu-glib4 libdbusmenu-gtk3-4 libdbusmenu-gtk4 + libdeflate-dev libdeflate0 + libdouble-conversion3 libdrm-amdgpu1 libdrm-dev libdrm-nouveau2 @@ -113,7 +121,6 @@ DEBIAN_PACKAGES="\ libexpat1 libexpat1-dev libffi-dev - libffi6 libffi7 libflac-dev libflac8 @@ -126,7 +133,7 @@ DEBIAN_PACKAGES="\ libgbm-dev libgbm1 libgcc-10-dev - libgcc1 + libgcc-s1 libgcrypt20 libgcrypt20-dev libgdk-pixbuf-2.0-0 @@ -164,6 +171,8 @@ DEBIAN_PACKAGES="\ libgssrpc4 libgtk-3-0 libgtk-3-dev + libgtk-4-1 + libgtk-4-dev libgtk2.0-0 libgudev-1.0-0 libharfbuzz-dev @@ -177,10 +186,9 @@ DEBIAN_PACKAGES="\ libidl-2-0 libidn11 libidn2-0 - libindicator3-7 - libindicator7 libinput-dev libinput10 + libjbig-dev libjbig0 libjpeg62-turbo libjpeg62-turbo-dev @@ -197,11 +205,13 @@ DEBIAN_PACKAGES="\ libkrb5support0 liblcms2-2 libldap-2.4-2 + liblerc4 libltdl7 liblz4-1 liblzma5 liblzo2-2 libmd0 + libmd4c0 libminizip-dev libminizip1 libmount-dev @@ -240,7 +250,6 @@ DEBIAN_PACKAGES="\ libpcre2-32-0 libpcre2-8-0 libpcre2-dev - libpcre2-posix0 libpcre2-posix2 libpcre3 libpcre3-dev @@ -252,11 +261,34 @@ DEBIAN_PACKAGES="\ libpixman-1-dev libpng-dev libpng16-16 + libproxy1v5 libpsl5 libpthread-stubs0-dev libpulse-dev libpulse-mainloop-glib0 libpulse0 + libqt5concurrent5 + libqt5core5a + libqt5dbus5 + libqt5gui5 + libqt5network5 + libqt5printsupport5 + libqt5sql5 + libqt5test5 + libqt5widgets5 + libqt5xml5 + libqt6concurrent6 + libqt6core6 + libqt6dbus6 + libqt6gui6 + libqt6network6 + libqt6opengl6 + libqt6openglwidgets6 + libqt6printsupport6 + libqt6sql6 + libqt6test6 + libqt6widgets6 + libqt6xml6 libre2-9 libre2-dev librest-0.7-0 @@ -281,17 +313,24 @@ DEBIAN_PACKAGES="\ libssl1.1 libstdc++-10-dev libstdc++6 + libsystemd-dev libsystemd0 libtasn1-6 libthai-dev libthai0 + libtiff-dev libtiff5 + libtiff6 + libtiffxx5 libtinfo6 libtirpc3 + libts0 libudev-dev libudev1 libunbound8 libunistring2 + libutempter-dev + libutempter0 libuuid1 libva-dev libva-drm2 @@ -301,19 +340,20 @@ DEBIAN_PACKAGES="\ libva2 libvorbis0a libvorbisenc2 - libvpx-dev - libvpx6 libvulkan-dev libvulkan1 libwacom2 + libwayland-bin libwayland-client0 libwayland-cursor0 libwayland-dev + libwayland-egl-backend-dev libwayland-egl1 libwayland-egl1-mesa libwayland-server0 libwebp-dev libwebp6 + libwebp7 libwebpdemux2 libwebpmux3 libwrap0 @@ -329,14 +369,31 @@ DEBIAN_PACKAGES="\ libxcb-dri3-dev libxcb-glx0 libxcb-glx0-dev + libxcb-icccm4 + libxcb-image0 + libxcb-image0-dev + libxcb-keysyms1 libxcb-present-dev libxcb-present0 + libxcb-randr0 + libxcb-randr0-dev + libxcb-render-util0 + libxcb-render-util0-dev libxcb-render0 libxcb-render0-dev + libxcb-shape0 + libxcb-shape0-dev libxcb-shm0 libxcb-shm0-dev + libxcb-sync-dev libxcb-sync1 + libxcb-util-dev + libxcb-util1 libxcb-xfixes0 + libxcb-xfixes0-dev + libxcb-xinerama0 + libxcb-xinput0 + libxcb-xkb1 libxcb1 libxcb1-dev libxcomposite-dev @@ -358,6 +415,7 @@ DEBIAN_PACKAGES="\ libxinerama-dev libxinerama1 
libxkbcommon-dev + libxkbcommon-x11-0 libxkbcommon0 libxml2 libxml2-dev @@ -380,43 +438,30 @@ DEBIAN_PACKAGES="\ libzstd1 linux-libc-dev mesa-common-dev + qt6-base-dev + qt6-base-dev-tools + qtbase5-dev + qtbase5-dev-tools shared-mime-info - speech-dispatcher uuid-dev wayland-protocols - x11proto-composite-dev - x11proto-damage-dev x11proto-dev - x11proto-fixes-dev - x11proto-input-dev - x11proto-kb-dev - x11proto-randr-dev - x11proto-record-dev - x11proto-render-dev - x11proto-scrnsaver-dev - x11proto-xext-dev - x11proto-xinerama-dev zlib1g zlib1g-dev " DEBIAN_PACKAGES_AMD64=" - libgtk-4-1 - libgtk-4-dev - liblsan0 libtsan0 + liblsan0 " DEBIAN_PACKAGES_X86=" libasan6 - libcilkrts5 libdrm-intel1 - libgtk-4-1 - libgtk-4-dev libitm1 - libmpx2 libquadmath0 libubsan1 + valgrind " DEBIAN_PACKAGES_ARM=" @@ -426,9 +471,8 @@ DEBIAN_PACKAGES_ARM=" libdrm-freedreno1 libdrm-omap1 libdrm-tegra0 - libgtk-4-1 - libgtk-4-dev libubsan1 + valgrind " DEBIAN_PACKAGES_ARM64=" @@ -437,13 +481,12 @@ DEBIAN_PACKAGES_ARM64=" libdrm-freedreno1 libdrm-tegra0 libgmp10 - libgtk-4-1 - libgtk-4-dev libitm1 liblsan0 libthai0 libtsan0 libubsan1 + valgrind " DEBIAN_PACKAGES_ARMEL=" @@ -452,12 +495,11 @@ DEBIAN_PACKAGES_ARMEL=" libdrm-freedreno1 libdrm-omap1 libdrm-tegra0 - libgtk-4-1 - libgtk-4-dev libubsan1 " DEBIAN_PACKAGES_MIPS64EL=" + valgrind " . "${SCRIPT_DIR}/sysroot-creator.sh" diff --git a/build/linux/sysroot_scripts/sysroot-creator.sh b/build/linux/sysroot_scripts/sysroot-creator.sh index fda3de4e57fd..63d5baa1f90e 100644 --- a/build/linux/sysroot_scripts/sysroot-creator.sh +++ b/build/linux/sysroot_scripts/sysroot-creator.sh @@ -1,15 +1,15 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # # This script should not be run directly but sourced by the other -# scripts (e.g. sysroot-creator-sid.sh). Its up to the parent scripts +# scripts (e.g. sysroot-creator-bullseye.sh). Its up to the parent scripts # to define certain environment variables: e.g. # DISTRO=debian -# DIST=sid +# DIST=bullseye # # Similar in syntax to /etc/apt/sources.list -# APT_SOURCES_LIST="http://ftp.us.debian.org/debian/ sid main" -# KEYRING_FILE=debian-archive-sid-stable.gpg +# APT_SOURCES_LIST=( "http://ftp.us.debian.org/debian/ bullseye main" ) +# KEYRING_FILE=debian-archive-bullseye-stable.gpg # DEBIAN_PACKAGES="gcc libz libssl" #@ This script builds Debian/Ubuntu sysroot images for building Google Chrome. 
@@ -234,14 +234,13 @@ ExtractPackageXz() { sed "s|Filename: |Filename: ${repo}|" > "${dst_file}" } -GeneratePackageListDist() { +GeneratePackageListDistRepo() { local arch="$1" - set -- $2 - local repo="$1" - local dist="$2" - local repo_name="$3" + local repo="$2" + local dist="$3" + local repo_name="$4" - TMP_PACKAGE_LIST="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}" + local tmp_package_list="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}" local repo_basedir="${repo}/dists/${dist}" local package_list="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}.${PACKAGES_EXT}" local package_file_arch="${repo_name}/binary-${arch}/Packages.${PACKAGES_EXT}" @@ -249,7 +248,20 @@ GeneratePackageListDist() { DownloadOrCopyNonUniqueFilename "${package_list_arch}" "${package_list}" VerifyPackageListing "${package_file_arch}" "${package_list}" ${repo} ${dist} - ExtractPackageXz "${package_list}" "${TMP_PACKAGE_LIST}" ${repo} + ExtractPackageXz "${package_list}" "${tmp_package_list}" ${repo} + cat "${tmp_package_list}" | ./merge-package-lists.py "${list_base}" +} + +GeneratePackageListDist() { + local arch="$1" + set -- $2 + local repo="$1" + local dist="$2" + shift 2 + while (( "$#" )); do + GeneratePackageListDistRepo "$arch" "$repo" "$dist" "$1" + shift + done } GeneratePackageListCommon() { @@ -257,13 +269,10 @@ GeneratePackageListCommon() { local arch="$2" local packages="$3" - local dists="${DIST} ${DIST_UPDATES:-}" - local list_base="${BUILD_DIR}/Packages.${DIST}_${arch}" > "${list_base}" # Create (or truncate) a zero-length file. - echo "${APT_SOURCES_LIST}" | while read source; do + printf '%s\n' "${APT_SOURCES_LIST[@]}" | while read source; do GeneratePackageListDist "${arch}" "${source}" - cat "${TMP_PACKAGE_LIST}" | ./merge-package-lists.py "${list_base}" done GeneratePackageList "${list_base}" "${output_file}" "${packages}" @@ -315,83 +324,32 @@ StripChecksumsFromPackageList() { HacksAndPatchesCommon() { local arch=$1 local os=$2 - local strip=$3 Banner "Misc Hacks & Patches" - # these are linker scripts with absolute pathnames in them - # which we rewrite here - lscripts="${INSTALL_ROOT}/usr/lib/${arch}-${os}/libpthread.so \ - ${INSTALL_ROOT}/usr/lib/${arch}-${os}/libc.so" - - # Rewrite linker scripts - sed -i -e 's|/usr/lib/${arch}-${os}/||g' ${lscripts} - sed -i -e 's|/lib/${arch}-${os}/||g' ${lscripts} - - # Unversion libdbus and libxkbcommon symbols. This is required because - # libdbus-1-3 and libxkbcommon0 switched from unversioned symbols to versioned - # ones, and we must still support distros using the unversioned library. This - # hack can be removed once support for Ubuntu Trusty and Debian Jessie are - # dropped. - ${strip} -R .gnu.version_d -R .gnu.version \ - "${INSTALL_ROOT}/lib/${arch}-${os}/libdbus-1.so.3" - cp "${SCRIPT_DIR}/libdbus-1-3-symbols" \ - "${INSTALL_ROOT}/debian/libdbus-1-3/DEBIAN/symbols" - - ${strip} -R .gnu.version_d -R .gnu.version \ - "${INSTALL_ROOT}/usr/lib/${arch}-${os}/libxkbcommon.so.0.0.0" - cp "${SCRIPT_DIR}/libxkbcommon0-symbols" \ - "${INSTALL_ROOT}/debian/libxkbcommon0/DEBIAN/symbols" + + # Remove an unnecessary dependency on qtchooser. + rm "${INSTALL_ROOT}/usr/lib/${arch}-${os}/qt-default/qtchooser/default.conf" # libxcomposite1 is missing a symbols file. cp "${SCRIPT_DIR}/libxcomposite1-symbols" \ "${INSTALL_ROOT}/debian/libxcomposite1/DEBIAN/symbols" - # Shared objects depending on libdbus-1.so.3 have unsatisfied undefined - # versioned symbols. 
To avoid LLD --no-allow-shlib-undefined errors, rewrite - # DT_NEEDED entries from libdbus-1.so.3 to a different string. LLD will - # suppress --no-allow-shlib-undefined diagnostics for such shared objects. - set +e - for f in "${INSTALL_ROOT}/lib/${arch}-${os}"/*.so \ - "${INSTALL_ROOT}/usr/lib/${arch}-${os}"/*.so; do - echo "$f" | grep -q 'libdbus-1.so$' && continue - # In a dependent shared object, the only occurrence of "libdbus-1.so.3" is - # the string referenced by the DT_NEEDED entry. - offset=$(LANG=C grep -abo libdbus-1.so.3 "$f") - [ -n "$offset" ] || continue - echo -n 'libdbus-1.so.0' | dd of="$f" conv=notrunc bs=1 \ - seek="$(echo -n "$offset" | cut -d : -f 1)" status=none - done - set -e - - # Glibc 2.27 introduced some new optimizations to several math functions, but - # it will be a while before it makes it into all supported distros. Luckily, - # glibc maintains ABI compatibility with previous versions, so the old symbols - # are still there. - # TODO(thomasanderson): Remove this once glibc 2.27 is available on all - # supported distros. - local math_h="${INSTALL_ROOT}/usr/include/math.h" - local libm_so="${INSTALL_ROOT}/lib/${arch}-${os}/libm.so.6" - nm -D --defined-only --with-symbol-versions "${libm_so}" | \ - "${SCRIPT_DIR}/find_incompatible_glibc_symbols.py" >> "${math_h}" - - # glob64() was also optimized in glibc 2.27. Make sure to choose the older - # version. - local glob_h="${INSTALL_ROOT}/usr/include/glob.h" - local libc_so="${INSTALL_ROOT}/lib/${arch}-${os}/libc.so.6" - nm -D --defined-only --with-symbol-versions "${libc_so}" | \ - "${SCRIPT_DIR}/find_incompatible_glibc_symbols.py" >> "${glob_h}" + # __GLIBC_MINOR__ is used as a feature test macro. Replace it with the + # earliest supported version of glibc (2.26, obtained from the oldest glibc + # version in //chrome/installer/linux/debian/dist_package_versions.json and + # //chrome/installer/linux/rpm/dist_package_provides.json). + local usr_include="${INSTALL_ROOT}/usr/include" + local features_h="${usr_include}/features.h" + sed -i 's|\(#define\s\+__GLIBC_MINOR__\)|\1 26 //|' "${features_h}" # fcntl64() was introduced in glibc 2.28. Make sure to use fcntl() instead. local fcntl_h="${INSTALL_ROOT}/usr/include/fcntl.h" sed -i '{N; s/#ifndef __USE_FILE_OFFSET64\(\nextern int fcntl\)/#if 1\1/}' \ "${fcntl_h}" - # On i386, fcntl() was updated in glibc 2.28. - nm -D --defined-only --with-symbol-versions "${libc_so}" | \ - "${SCRIPT_DIR}/find_incompatible_glibc_symbols.py" >> "${fcntl_h}" - # __GLIBC_MINOR__ is used as a feature test macro. Replace it with the - # earliest supported version of glibc (2.17, https://crbug.com/376567). - local features_h="${INSTALL_ROOT}/usr/include/features.h" - sed -i 's|\(#define\s\+__GLIBC_MINOR__\)|\1 17 //|' "${features_h}" + # Do not use pthread_cond_clockwait as it was introduced in glibc 2.30. + local cppconfig_h="${usr_include}/${arch}-${os}/c++/10/bits/c++config.h" + sed -i 's|\(#define\s\+_GLIBCXX_USE_PTHREAD_COND_CLOCKWAIT\)|// \1|' \ + "${cppconfig_h}" # This is for chrome's ./build/linux/pkg-config-wrapper # which overwrites PKG_CONFIG_LIBDIR internally @@ -402,37 +360,56 @@ } +ReversionGlibc() { + local arch=$1 + local os=$2 + + # Avoid requiring unsupported glibc versions.
+ "${SCRIPT_DIR}/reversion_glibc.py" \ + "${INSTALL_ROOT}/lib/${arch}-${os}/libc.so.6" + "${SCRIPT_DIR}/reversion_glibc.py" \ + "${INSTALL_ROOT}/lib/${arch}-${os}/libm.so.6" + "${SCRIPT_DIR}/reversion_glibc.py" \ + "${INSTALL_ROOT}/lib/${arch}-${os}/libcrypt.so.1" +} + + HacksAndPatchesAmd64() { - HacksAndPatchesCommon x86_64 linux-gnu strip + HacksAndPatchesCommon x86_64 linux-gnu + ReversionGlibc x86_64 linux-gnu } HacksAndPatchesI386() { - HacksAndPatchesCommon i386 linux-gnu strip + HacksAndPatchesCommon i386 linux-gnu + ReversionGlibc i386 linux-gnu } HacksAndPatchesARM() { - HacksAndPatchesCommon arm linux-gnueabihf arm-linux-gnueabihf-strip + HacksAndPatchesCommon arm linux-gnueabihf + ReversionGlibc arm linux-gnueabihf } HacksAndPatchesARM64() { - # Use the unstripped libdbus for arm64 to prevent linker errors. - # https://bugs.chromium.org/p/webrtc/issues/detail?id=8535 - HacksAndPatchesCommon aarch64 linux-gnu true + HacksAndPatchesCommon aarch64 linux-gnu + ReversionGlibc aarch64 linux-gnu } HacksAndPatchesARMEL() { - HacksAndPatchesCommon arm linux-gnueabi arm-linux-gnueabi-strip + HacksAndPatchesCommon arm linux-gnueabi + ReversionGlibc arm linux-gnueabi } HacksAndPatchesMips() { - HacksAndPatchesCommon mipsel linux-gnu mipsel-linux-gnu-strip + HacksAndPatchesCommon mipsel linux-gnu + ReversionGlibc mipsel linux-gnu } HacksAndPatchesMips64el() { - HacksAndPatchesCommon mips64el linux-gnuabi64 mips64el-linux-gnuabi64-strip + HacksAndPatchesCommon mips64el linux-gnuabi64 + ReversionGlibc mips64el linux-gnuabi64 } @@ -473,8 +450,9 @@ InstallIntoSysroot() { dpkg-deb -e ${package} ${INSTALL_ROOT}/debian/${base_package}/DEBIAN done - # Prune /usr/share, leaving only pkgconfig. - ls -d ${INSTALL_ROOT}/usr/share/* | grep -v "/pkgconfig$" | xargs rm -r + # Prune /usr/share, leaving only pkgconfig, wayland, and wayland-protocols. + ls -d ${INSTALL_ROOT}/usr/share/* | \ + grep -v "/\(pkgconfig\|wayland\|wayland-protocols\)$" | xargs rm -r } @@ -497,14 +475,18 @@ CleanupJailSymlinks() { ln -snfv "${prefix}${target}" "${link}" done - find $libdirs -type l -printf '%p %l\n' | while read link target; do + failed=0 + while read link target; do # Make sure we catch new bad links. if [ ! 
-r "${link}" ]; then echo "ERROR: FOUND BAD LINK ${link}" ls -l ${link} - exit 1 + failed=1 fi - done + done < <(find $libdirs -type l -printf '%p %l\n') + if [ $failed -eq 1 ]; then + exit 1 + fi cd "$SAVEDPWD" } @@ -581,8 +563,8 @@ BuildSysrootAmd64() { local files_and_sha256sums="$(cat ${package_file})" StripChecksumsFromPackageList "$package_file" InstallIntoSysroot ${files_and_sha256sums} - CleanupJailSymlinks HacksAndPatchesAmd64 + CleanupJailSymlinks VerifyLibraryDepsAmd64 CreateTarBall } @@ -601,8 +583,8 @@ BuildSysrootI386() { local files_and_sha256sums="$(cat ${package_file})" StripChecksumsFromPackageList "$package_file" InstallIntoSysroot ${files_and_sha256sums} - CleanupJailSymlinks HacksAndPatchesI386 + CleanupJailSymlinks VerifyLibraryDepsI386 CreateTarBall } @@ -621,8 +603,8 @@ BuildSysrootARM() { local files_and_sha256sums="$(cat ${package_file})" StripChecksumsFromPackageList "$package_file" InstallIntoSysroot ${files_and_sha256sums} - CleanupJailSymlinks HacksAndPatchesARM + CleanupJailSymlinks VerifyLibraryDepsARM CreateTarBall } @@ -641,8 +623,8 @@ BuildSysrootARM64() { local files_and_sha256sums="$(cat ${package_file})" StripChecksumsFromPackageList "$package_file" InstallIntoSysroot ${files_and_sha256sums} - CleanupJailSymlinks HacksAndPatchesARM64 + CleanupJailSymlinks VerifyLibraryDepsARM64 CreateTarBall } @@ -661,8 +643,8 @@ BuildSysrootARMEL() { local files_and_sha256sums="$(cat ${package_file})" StripChecksumsFromPackageList "$package_file" InstallIntoSysroot ${files_and_sha256sums} - CleanupJailSymlinks HacksAndPatchesARMEL + CleanupJailSymlinks VerifyLibraryDepsARMEL CreateTarBall } @@ -681,8 +663,8 @@ BuildSysrootMips() { local files_and_sha256sums="$(cat ${package_file})" StripChecksumsFromPackageList "$package_file" InstallIntoSysroot ${files_and_sha256sums} - CleanupJailSymlinks HacksAndPatchesMips + CleanupJailSymlinks VerifyLibraryDepsMips CreateTarBall } @@ -701,8 +683,8 @@ BuildSysrootMips64el() { local files_and_sha256sums="$(cat ${package_file})" StripChecksumsFromPackageList "$package_file" InstallIntoSysroot ${files_and_sha256sums} - CleanupJailSymlinks HacksAndPatchesMips64el + CleanupJailSymlinks VerifyLibraryDepsMips64el CreateTarBall } @@ -879,21 +861,26 @@ GeneratePackageList() { /bin/rm -f "${output_file}" shift shift + local failed=0 for pkg in $@ ; do local pkg_full=$(grep -A 1 " ${pkg}\$" "$input_file" | \ egrep "pool/.*" | sed 's/.*Filename: //') if [ -z "${pkg_full}" ]; then - echo "ERROR: missing package: $pkg" - exit 1 - fi - local sha256sum=$(grep -A 4 " ${pkg}\$" "$input_file" | \ - grep ^SHA256: | sed 's/^SHA256: //') - if [ "${#sha256sum}" -ne "64" ]; then - echo "Bad sha256sum from Packages" - exit 1 + echo "ERROR: missing package: $pkg" + local failed=1 + else + local sha256sum=$(grep -A 4 " ${pkg}\$" "$input_file" | \ + grep ^SHA256: | sed 's/^SHA256: //') + if [ "${#sha256sum}" -ne "64" ]; then + echo "Bad sha256sum from Packages" + local failed=1 + fi + echo $pkg_full $sha256sum >> "$output_file" fi - echo $pkg_full $sha256sum >> "$output_file" done + if [ $failed -eq 1 ]; then + exit 1 + fi # sort -o does an in-place sort of this file sort "$output_file" -o "$output_file" } @@ -935,13 +922,21 @@ PrintDistro() { } #@ -#@ DumpRelease +#@ PrintRelease #@ -#@ Prints disto release. eg: jessie +#@ Prints disto release. eg: bullseye PrintRelease() { echo ${DIST} } +#@ +#@ PrintKey +#@ +#@ Prints sysroot key identifier. 
+PrintKey() { + echo "${ARCHIVE_TIMESTAMP}-${SYSROOT_RELEASE}" +} + RunCommand() { SetEnvironmentVariables "$1" SanityCheck diff --git a/build/linux/sysroot_scripts/sysroots.json b/build/linux/sysroot_scripts/sysroots.json index 6248db7d9a32..02004260f7b8 100644 --- a/build/linux/sysroot_scripts/sysroots.json +++ b/build/linux/sysroot_scripts/sysroots.json @@ -1,37 +1,44 @@ { - "sid_amd64": { - "Sha1Sum": "43a87bbebccad99325fdcf34166295b121ee15c7", - "SysrootDir": "debian_sid_amd64-sysroot", - "Tarball": "debian_sid_amd64_sysroot.tar.xz" + "bullseye_amd64": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "f5f68713249b52b35db9e08f67184cac392369ab", + "SysrootDir": "debian_bullseye_amd64-sysroot", + "Tarball": "debian_bullseye_amd64_sysroot.tar.xz" }, - "sid_arm": { - "Sha1Sum": "11d6f690ca49e8ba01a1d8c5346cedad2cf308fd", - "SysrootDir": "debian_sid_arm-sysroot", - "Tarball": "debian_sid_arm_sysroot.tar.xz" + "bullseye_arm": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "f80db01d7b3b973ca2aeeed000aa43bd0c082f15", + "SysrootDir": "debian_bullseye_arm-sysroot", + "Tarball": "debian_bullseye_arm_sysroot.tar.xz" }, - "sid_arm64": { - "Sha1Sum": "2befe8ce3e88be6080e4fb7e6d412278ea6a7625", - "SysrootDir": "debian_sid_arm64-sysroot", - "Tarball": "debian_sid_arm64_sysroot.tar.xz" + "bullseye_arm64": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "80fc74e431f37f590d0c85f16a9d8709088929e8", + "SysrootDir": "debian_bullseye_arm64-sysroot", + "Tarball": "debian_bullseye_arm64_sysroot.tar.xz" }, - "sid_armel": { - "Sha1Sum": "a0e2a51aaa7d779fc45415ac30c835b67caa6663", - "SysrootDir": "debian_sid_armel-sysroot", - "Tarball": "debian_sid_armel_sysroot.tar.xz" + "bullseye_armel": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "37801ea4e948feabd69c74390c4b80e932b63de0", + "SysrootDir": "debian_bullseye_armel-sysroot", + "Tarball": "debian_bullseye_armel_sysroot.tar.xz" }, - "sid_i386": { - "Sha1Sum": "d53a049af5961f2f121ee4e149918097c193f8ed", - "SysrootDir": "debian_sid_i386-sysroot", - "Tarball": "debian_sid_i386_sysroot.tar.xz" + "bullseye_i386": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "b6c18d06d79c0abb870a126a3ae5f8086e355e5f", + "SysrootDir": "debian_bullseye_i386-sysroot", + "Tarball": "debian_bullseye_i386_sysroot.tar.xz" }, - "sid_mips": { - "Sha1Sum": "eb577cef43088b7e0540950c74f994267631d4cd", - "SysrootDir": "debian_sid_mips-sysroot", - "Tarball": "debian_sid_mips_sysroot.tar.xz" + "bullseye_mips": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "fed66c435eeb0bd71e9394a0fb0c4e078d90ea50", + "SysrootDir": "debian_bullseye_mips-sysroot", + "Tarball": "debian_bullseye_mips_sysroot.tar.xz" }, - "sid_mips64el": { - "Sha1Sum": "6cb76f27035d1460fe164f7e6c5318c047aac153", - "SysrootDir": "debian_sid_mips64el-sysroot", - "Tarball": "debian_sid_mips64el_sysroot.tar.xz" + "bullseye_mips64el": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "37e23cd7512b3c4d0dacbc5d253f3a496c38f5fb", + "SysrootDir": "debian_bullseye_mips64el-sysroot", + "Tarball": "debian_bullseye_mips64el_sysroot.tar.xz" } } diff --git a/build/linux/sysroot_scripts/update-archive-timestamp.sh b/build/linux/sysroot_scripts/update-archive-timestamp.sh index a61dd9957174..ff5adf16915a 100755 --- a/build/linux/sysroot_scripts/update-archive-timestamp.sh +++ b/build/linux/sysroot_scripts/update-archive-timestamp.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
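Taken together, the hacks above pin the sysroot's advertised glibc to the 2.26 floor: __GLIBC_MINOR__ is clamped in features.h, fcntl64() and pthread_cond_clockwait are masked out, and the new ReversionGlibc step runs reversion_glibc.py over libc, libm, and libcrypt to rewrite newer symbol-version tags. A quick way to confirm the result on a built binary is to look for the newest GLIBC_x.y version it references. A minimal sketch, not part of the patch, assuming binutils' objdump is on PATH; the output path is hypothetical:

    import re
    import subprocess

    # Report the newest GLIBC_x.y symbol version a binary depends on, to
    # verify nothing leaked in above the sysroot's glibc floor.
    def max_glibc_version(path):
        out = subprocess.run(["objdump", "-T", path], capture_output=True,
                             text=True, check=True).stdout
        versions = {tuple(map(int, m.groups()))
                    for m in re.finditer(r"GLIBC_(\d+)\.(\d+)", out)}
        return max(versions, default=None)

    print(max_glibc_version("out/Release/chrome"))  # expect at most (2, 26)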
diff --git a/build/linux/unbundle/absl_algorithm.gn b/build/linux/unbundle/absl_algorithm.gn new file mode 100644 index 000000000000..cc41c6861728 --- /dev/null +++ b/build/linux/unbundle/absl_algorithm.gn @@ -0,0 +1,22 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_algorithm_container") { + packages = [ "absl_algorithm_container" ] +} + +shim_headers("container_shim") { + root_path = "." + prefix = "absl/algorithm/" + headers = [ "container.h" ] +} + +source_set("container") { + deps = [ ":container_shim" ] + public_configs = [ ":system_absl_algorithm_container" ] +} + +source_set("algorithm_test") { +} +source_set("container_test") { +} diff --git a/build/linux/unbundle/absl_base.gn b/build/linux/unbundle/absl_base.gn new file mode 100644 index 000000000000..d83e9786a315 --- /dev/null +++ b/build/linux/unbundle/absl_base.gn @@ -0,0 +1,67 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_base") { + packages = [ "absl_base" ] +} + +pkg_config("system_absl_config") { + packages = [ "absl_config" ] +} + +pkg_config("system_absl_core_headers") { + packages = [ "absl_core_headers" ] +} + +shim_headers("base_shim") { + root_path = "." + prefix = "absl/base/" + headers = [ + "call_once.h", + "casts.h", + ] +} + +source_set("base") { + deps = [ ":base_shim" ] + public_configs = [ ":system_absl_base" ] +} + +shim_headers("config_shim") { + root_path = "." + prefix = "absl/base/" + headers = [ + "config.h", + "options.h", + "policy_checks.h", + ] +} + +source_set("config") { + deps = [ ":config_shim" ] + public_configs = [ ":system_absl_config" ] +} + +shim_headers("core_headers_shim") { + root_path = "." + prefix = "absl/base/" + headers = [ + "attributes.h", + "const_init.h", + "macros.h", + "optimization.h", + "port.h", + "thread_annotations.h", + ] +} + +source_set("core_headers") { + deps = [ ":core_headers_shim" ] + public_configs = [ ":system_absl_core_headers" ] +} + +source_set("config_test") { +} + +source_set("prefetch_test") { +} diff --git a/build/linux/unbundle/absl_cleanup.gn b/build/linux/unbundle/absl_cleanup.gn new file mode 100644 index 000000000000..61455edf31ff --- /dev/null +++ b/build/linux/unbundle/absl_cleanup.gn @@ -0,0 +1,20 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_cleanup") { + packages = [ "absl_cleanup" ] +} + +shim_headers("cleanup_shim") { + root_path = "." 
+ prefix = "absl/cleanup/" + headers = [ "cleanup.h" ] +} + +source_set("cleanup") { + deps = [ ":cleanup_shim" ] + public_configs = [ ":system_absl_cleanup" ] +} + +source_set("cleanup_test") { +} diff --git a/build/linux/unbundle/absl_container.gn b/build/linux/unbundle/absl_container.gn new file mode 100644 index 000000000000..4e9796a83b4a --- /dev/null +++ b/build/linux/unbundle/absl_container.gn @@ -0,0 +1,119 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_btree") { + packages = [ "absl_btree" ] +} + +pkg_config("system_absl_fixed_array") { + packages = [ "absl_fixed_array" ] +} + +pkg_config("system_absl_flat_hash_map") { + packages = [ "absl_flat_hash_map" ] +} + +pkg_config("system_absl_flat_hash_set") { + packages = [ "absl_flat_hash_set" ] +} + +pkg_config("system_absl_inlined_vector") { + packages = [ "absl_inlined_vector" ] +} + +pkg_config("system_absl_node_hash_map") { + packages = [ "absl_node_hash_map" ] +} + +pkg_config("system_absl_node_hash_set") { + packages = [ "absl_node_hash_set" ] +} + +shim_headers("btree_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ + "btree_map.h", + "btree_set.h", + ] +} + +source_set("btree") { + deps = [ ":btree_shim" ] + public_configs = [ ":system_absl_btree" ] +} + +shim_headers("fixed_array_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ "fixed_array.h" ] +} + +source_set("fixed_array") { + deps = [ ":fixed_array_shim" ] + public_configs = [ ":system_absl_fixed_array" ] +} + +shim_headers("flat_hash_map_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ "flat_hash_map.h" ] +} + +source_set("flat_hash_map") { + deps = [ ":flat_hash_map_shim" ] + public_configs = [ ":system_absl_flat_hash_map" ] +} + +shim_headers("flat_hash_set_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ "flat_hash_set.h" ] +} + +source_set("flat_hash_set") { + deps = [ ":flat_hash_set_shim" ] + public_configs = [ ":system_absl_flat_hash_set" ] +} + +shim_headers("inlined_vector_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ "inlined_vector.h" ] +} + +source_set("inlined_vector") { + deps = [ ":inlined_vector_shim" ] + public_configs = [ ":system_absl_inlined_vector" ] +} + +shim_headers("node_hash_map_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ "node_hash_map.h" ] +} + +source_set("node_hash_map") { + deps = [ ":node_hash_map_shim" ] + public_configs = [ ":system_absl_node_hash_map" ] +} + +shim_headers("node_hash_set_shim") { + root_path = "." 
+ prefix = "absl/container/" + headers = [ "node_hash_set.h" ] +} + +source_set("node_hash_set") { + deps = [ ":node_hash_set_shim" ] + public_configs = [ ":system_absl_node_hash_set" ] +} + +source_set("common_policy_traits_test") { +} +source_set("inlined_vector_test") { +} +source_set("node_slot_policy_test") { +} +source_set("sample_element_size_test") { +} diff --git a/build/linux/unbundle/absl_debugging.gn b/build/linux/unbundle/absl_debugging.gn new file mode 100644 index 000000000000..2c38e4357e16 --- /dev/null +++ b/build/linux/unbundle/absl_debugging.gn @@ -0,0 +1,47 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_failure_signal_handler") { + packages = [ "absl_failure_signal_handler" ] +} + +pkg_config("system_absl_stacktrace") { + packages = [ "absl_stacktrace" ] +} + +pkg_config("system_absl_symbolize") { + packages = [ "absl_symbolize" ] +} + +shim_headers("failure_signal_handler_shim") { + root_path = "." + prefix = "absl/debugging/" + headers = [ "failure_signal_handler.h" ] +} + +source_set("failure_signal_handler") { + deps = [ ":failure_signal_handler_shim" ] + public_configs = [ ":system_absl_failure_signal_handler" ] +} + +shim_headers("stacktrace_shim") { + root_path = "." + prefix = "absl/debugging/" + headers = [ "stacktrace.h" ] +} + +source_set("stacktrace") { + deps = [ ":stacktrace_shim" ] + public_configs = [ ":system_absl_stacktrace" ] +} + +shim_headers("symbolize_shim") { + root_path = "." + prefix = "absl/debugging/" + headers = [ "symbolize.h" ] +} + +source_set("symbolize") { + deps = [ ":symbolize_shim" ] + public_configs = [ ":system_absl_symbolize" ] +} diff --git a/build/linux/unbundle/absl_flags.gn b/build/linux/unbundle/absl_flags.gn new file mode 100644 index 000000000000..e420603fc860 --- /dev/null +++ b/build/linux/unbundle/absl_flags.gn @@ -0,0 +1,50 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_flags") { + packages = [ "absl_flags" ] +} + +pkg_config("system_absl_flags_parse") { + packages = [ "absl_flags_parse" ] +} + +pkg_config("system_absl_flags_usage") { + packages = [ "absl_flags_usage" ] +} + +shim_headers("flag_shim") { + root_path = "." + prefix = "absl/flags/" + headers = [ + "declare.h", + "flag.h", + ] +} + +source_set("flag") { + deps = [ ":flag_shim" ] + public_configs = [ ":system_absl_flags" ] +} + +shim_headers("parse_shim") { + root_path = "." + prefix = "absl/flags/" + headers = [ "parse.h" ] +} + +source_set("parse") { + deps = [ ":parse_shim" ] + public_configs = [ ":system_absl_flags_parse" ] +} + +shim_headers("usage_shim") { + root_path = "." + prefix = "absl/flags/" + headers = [ "usage.h" ] +} + +source_set("usage") { + deps = [ ":usage_shim" ] + public_configs = [ ":system_absl_flags_usage" ] +} diff --git a/build/linux/unbundle/absl_functional.gn b/build/linux/unbundle/absl_functional.gn new file mode 100644 index 000000000000..1719587a9b7a --- /dev/null +++ b/build/linux/unbundle/absl_functional.gn @@ -0,0 +1,49 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_any_invocable") { + packages = [ "absl_any_invocable" ] +} + +shim_headers("any_invocable_shim") { + root_path = "." 
+ prefix = "absl/functional/" + headers = [ "any_invocable.h" ] +} + +source_set("any_invocable") { + deps = [ ":any_invocable_shim" ] + public_configs = [ ":system_absl_any_invocable" ] +} + +pkg_config("system_absl_bind_front") { + packages = [ "absl_bind_front" ] +} + +shim_headers("bind_front_shim") { + root_path = "." + prefix = "absl/functional/" + headers = [ "bind_front.h" ] +} + +source_set("bind_front") { + deps = [ ":bind_front_shim" ] + public_configs = [ ":system_absl_bind_front" ] +} + +pkg_config("system_absl_function_ref") { + packages = [ "absl_function_ref" ] +} + +shim_headers("function_ref_shim") { + root_path = "." + prefix = "absl/functional/" + headers = [ "function_ref.h" ] +} + +source_set("function_ref") { + deps = [ ":function_ref_shim" ] + public_configs = [ ":system_absl_function_ref" ] +} + +source_set("any_invocable_test") {} diff --git a/build/linux/unbundle/absl_hash.gn b/build/linux/unbundle/absl_hash.gn new file mode 100644 index 000000000000..cb07851c8b16 --- /dev/null +++ b/build/linux/unbundle/absl_hash.gn @@ -0,0 +1,22 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_hash") { + packages = [ "absl_hash" ] +} + +shim_headers("hash_shim") { + root_path = "." + prefix = "absl/hash/" + headers = [ "hash.h" ] +} + +source_set("hash") { + deps = [ ":hash_shim" ] + public_configs = [ ":system_absl_hash" ] +} + +source_set("hash_test") { +} +source_set("low_level_hash_test") { +} diff --git a/build/linux/unbundle/absl_log.gn b/build/linux/unbundle/absl_log.gn new file mode 100644 index 000000000000..85a09a068348 --- /dev/null +++ b/build/linux/unbundle/absl_log.gn @@ -0,0 +1,13 @@ +source_set("basic_log_test") {} +source_set("check_test") {} +source_set("die_if_null_test") {} +source_set("flags_test") {} +source_set("globals_test") {} +source_set("log_entry_test") {} +source_set("log_format_test") {} +source_set("log_macro_hygiene_test") {} +source_set("log_modifier_methods_test") {} +source_set("log_sink_test") {} +source_set("log_streamer_test") {} +source_set("scoped_mock_log_test") {} +source_set("stripping_test") {} diff --git a/build/linux/unbundle/absl_log_internal.gn b/build/linux/unbundle/absl_log_internal.gn new file mode 100644 index 000000000000..f58c7f861389 --- /dev/null +++ b/build/linux/unbundle/absl_log_internal.gn @@ -0,0 +1 @@ +source_set("stderr_log_sink_test") {} diff --git a/build/linux/unbundle/absl_memory.gn b/build/linux/unbundle/absl_memory.gn new file mode 100644 index 000000000000..5d6abe87f98f --- /dev/null +++ b/build/linux/unbundle/absl_memory.gn @@ -0,0 +1,20 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_memory") { + packages = [ "absl_memory" ] +} + +shim_headers("memory_shim") { + root_path = "." + prefix = "absl/memory/" + headers = [ "memory.h" ] +} + +source_set("memory") { + deps = [ ":memory_shim" ] + public_configs = [ ":system_absl_memory" ] +} + +source_set("memory_test") { +} diff --git a/build/linux/unbundle/absl_meta.gn b/build/linux/unbundle/absl_meta.gn new file mode 100644 index 000000000000..7f79a06ccb0f --- /dev/null +++ b/build/linux/unbundle/absl_meta.gn @@ -0,0 +1,20 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_type_traits") { + packages = [ "absl_type_traits" ] +} + +shim_headers("type_traits_shim") { + root_path = "." 
+ prefix = "absl/meta/" + headers = [ "type_traits.h" ] +} + +source_set("type_traits") { + deps = [ ":type_traits_shim" ] + public_configs = [ ":system_absl_type_traits" ] +} + +source_set("type_traits_test") { +} diff --git a/build/linux/unbundle/absl_numeric.gn b/build/linux/unbundle/absl_numeric.gn new file mode 100644 index 000000000000..c3688f9f2ae5 --- /dev/null +++ b/build/linux/unbundle/absl_numeric.gn @@ -0,0 +1,32 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_bits") { + packages = [ "absl_bits" ] +} + +pkg_config("system_absl_int128") { + packages = [ "absl_int128" ] +} + +shim_headers("bits_shim") { + root_path = "." + prefix = "absl/numeric/" + headers = [ "bits.h" ] +} + +source_set("bits") { + deps = [ ":bits_shim" ] + public_configs = [ ":system_absl_bits" ] +} + +shim_headers("int128_shim") { + root_path = "." + prefix = "absl/numeric/" + headers = [ "int128.h" ] +} + +source_set("int128") { + deps = [ ":int128_shim" ] + public_configs = [ ":system_absl_int128" ] +} diff --git a/build/linux/unbundle/absl_random.gn b/build/linux/unbundle/absl_random.gn new file mode 100644 index 000000000000..e52c9fcd961b --- /dev/null +++ b/build/linux/unbundle/absl_random.gn @@ -0,0 +1,17 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_random_random") { + packages = [ "absl_random_random" ] +} + +shim_headers("random_shim") { + root_path = "." + prefix = "absl/random/" + headers = [ "random.h" ] +} + +source_set("random") { + deps = [ ":random_shim" ] + public_configs = [ ":system_absl_random_random" ] +} diff --git a/build/linux/unbundle/absl_status.gn b/build/linux/unbundle/absl_status.gn new file mode 100644 index 000000000000..b7f40b0bf09e --- /dev/null +++ b/build/linux/unbundle/absl_status.gn @@ -0,0 +1,38 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_status") { + packages = [ "absl_status" ] +} + +pkg_config("system_absl_statusor") { + packages = [ "absl_statusor" ] +} + +shim_headers("status_shim") { + root_path = "." + prefix = "absl/status/" + headers = [ + "status.h", + "status_payload_printer.h", + ] +} + +source_set("status") { + deps = [ ":status_shim" ] + public_configs = [ ":system_absl_status" ] +} + +shim_headers("statusor_shim") { + root_path = "." + prefix = "absl/status/" + headers = [ "statusor.h" ] +} + +source_set("statusor") { + deps = [ ":statusor_shim" ] + public_configs = [ ":system_absl_statusor" ] +} + +source_set("statusor_test") { +} diff --git a/build/linux/unbundle/absl_strings.gn b/build/linux/unbundle/absl_strings.gn new file mode 100644 index 000000000000..f06a19bc11ca --- /dev/null +++ b/build/linux/unbundle/absl_strings.gn @@ -0,0 +1,93 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_cord") { + packages = [ "absl_cord" ] +} + +pkg_config("system_absl_strings") { + packages = [ "absl_strings" ] +} + +pkg_config("system_absl_str_format") { + packages = [ "absl_str_format" ] +} + +shim_headers("cord_shim") { + root_path = "." + prefix = "absl/strings/" + headers = [ "cord.h" ] +} + +source_set("cord") { + deps = [ ":cord_shim" ] + public_configs = [ ":system_absl_cord" ] +} + +shim_headers("strings_shim") { + root_path = "." 
+ prefix = "absl/strings/" + headers = [ + "ascii.h", + "charconv.h", + "escaping.h", + "match.h", + "numbers.h", + "str_cat.h", + "str_join.h", + "str_replace.h", + "str_split.h", + "string_view.h", + "strip.h", + "substitute.h", + ] +} + +source_set("strings") { + deps = [ ":strings_shim" ] + public_configs = [ ":system_absl_strings" ] +} + +shim_headers("str_format_shim") { + root_path = "." + prefix = "absl/strings/" + headers = [ "str_format.h" ] +} + +source_set("str_format") { + deps = [ ":str_format_shim" ] + public_configs = [ ":system_absl_str_format" ] +} + +source_set("ascii_test") { +} +source_set("cord_buffer_test") { +} +source_set("cord_data_edge_test") { +} +source_set("cord_rep_btree_navigator_test") { +} +source_set("cord_rep_btree_reader_test") { +} +source_set("cord_rep_btree_test") { +} +source_set("cord_rep_crc_test") { +} +source_set("cordz_functions_test") { +} +source_set("cordz_info_statistics_test") { +} +source_set("cordz_info_test") { +} +source_set("cordz_test") { +} +source_set("cordz_update_scope_test") { +} +source_set("cordz_update_tracker_test") { +} +source_set("match_test") { +} +source_set("str_replace_test") { +} +source_set("string_view_test") { +} diff --git a/build/linux/unbundle/absl_synchronization.gn b/build/linux/unbundle/absl_synchronization.gn new file mode 100644 index 000000000000..60bcf94213fd --- /dev/null +++ b/build/linux/unbundle/absl_synchronization.gn @@ -0,0 +1,22 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_synchronization") { + packages = [ "absl_synchronization" ] +} + +shim_headers("synchronization_shim") { + root_path = "." + prefix = "absl/synchronization/" + headers = [ + "barrier.h", + "blocking_counter.h", + "mutex.h", + "notification.h", + ] +} + +source_set("synchronization") { + deps = [ ":synchronization_shim" ] + public_configs = [ ":system_absl_synchronization" ] +} diff --git a/build/linux/unbundle/absl_time.gn b/build/linux/unbundle/absl_time.gn new file mode 100644 index 000000000000..df5cd20f4995 --- /dev/null +++ b/build/linux/unbundle/absl_time.gn @@ -0,0 +1,21 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_time") { + packages = [ "absl_time" ] +} + +shim_headers("time_shim") { + root_path = "." + prefix = "absl/time/" + headers = [ + "civil_time.h", + "clock.h", + "time.h", + ] +} + +source_set("time") { + deps = [ ":time_shim" ] + public_configs = [ ":system_absl_time" ] +} diff --git a/build/linux/unbundle/absl_types.gn b/build/linux/unbundle/absl_types.gn new file mode 100644 index 000000000000..4bb77f1b631c --- /dev/null +++ b/build/linux/unbundle/absl_types.gn @@ -0,0 +1,97 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_any") { + packages = [ "absl_any" ] +} + +pkg_config("system_absl_bad_any_cast") { + packages = [ "absl_bad_any_cast" ] +} + +pkg_config("system_absl_bad_optional_access") { + packages = [ "absl_bad_optional_access" ] +} + +pkg_config("system_absl_optional") { + packages = [ "absl_optional" ] +} + +pkg_config("system_absl_span") { + packages = [ "absl_span" ] +} + +pkg_config("system_absl_variant") { + packages = [ "absl_variant" ] +} + +shim_headers("any_shim") { + root_path = "." + prefix = "absl/types/" + headers = [ "any.h" ] +} + +source_set("any") { + deps = [ ":any_shim" ] + public_configs = [ ":system_absl_any" ] +} + +shim_headers("bad_any_cast_shim") { + root_path = "." 
+ prefix = "absl/types/" + headers = [ "bad_any_cast.h" ] +} + +source_set("bad_any_cast") { + deps = [ ":bad_any_cast_shim" ] + public_configs = [ ":system_absl_bad_any_cast" ] +} + +shim_headers("bad_optional_access_shim") { + root_path = "." + prefix = "absl/types/" + headers = [ "bad_optional_access.h" ] +} + +source_set("bad_optional_access") { + deps = [ ":bad_optional_access_shim" ] + public_configs = [ ":system_absl_bad_optional_access" ] +} + +shim_headers("optional_shim") { + root_path = "." + prefix = "absl/types/" + headers = [ "optional.h" ] +} + +source_set("optional") { + deps = [ ":optional_shim" ] + public_configs = [ ":system_absl_optional" ] +} + +shim_headers("span_shim") { + root_path = "." + prefix = "absl/types/" + headers = [ "span.h" ] +} + +source_set("span") { + deps = [ ":span_shim" ] + public_configs = [ ":system_absl_span" ] +} + +shim_headers("variant_shim") { + root_path = "." + prefix = "absl/types/" + headers = [ "variant.h" ] +} + +source_set("variant") { + deps = [ ":variant_shim" ] + public_configs = [ ":system_absl_variant" ] +} + +source_set("optional_test") { +} +source_set("variant_test") { +} diff --git a/build/linux/unbundle/absl_utility.gn b/build/linux/unbundle/absl_utility.gn new file mode 100644 index 000000000000..8dda6ed01afa --- /dev/null +++ b/build/linux/unbundle/absl_utility.gn @@ -0,0 +1,17 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_utility") { + packages = [ "absl_utility" ] +} + +shim_headers("utility_shim") { + root_path = "." + prefix = "absl/utility/" + headers = [ "utility.h" ] +} + +source_set("utility") { + deps = [ ":utility_shim" ] + public_configs = [ ":system_absl_utility" ] +} diff --git a/build/linux/unbundle/brotli.gn b/build/linux/unbundle/brotli.gn new file mode 100644 index 000000000000..09f55d1ada19 --- /dev/null +++ b/build/linux/unbundle/brotli.gn @@ -0,0 +1,35 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_brotli_dec") { + packages = [ "libbrotlidec" ] +} + +pkg_config("system_brotli_enc") { + packages = [ "libbrotlienc" ] +} + +shim_headers("brotli_shim") { + root_path = "include" + headers = [ + "brotli/decode.h", + "brotli/encode.h", + "brotli/port.h", + "brotli/types.h", + ] +} + +source_set("dec") { + deps = [ ":brotli_shim" ] + public_configs = [ ":system_brotli_dec" ] +} + +source_set("enc") { + deps = [ ":brotli_shim" ] + public_configs = [ ":system_brotli_enc" ] +} + +copy("brotli") { + sources = [ "/usr/bin/brotli" ] + outputs = [ "$root_out_dir/brotli" ] +} diff --git a/build/linux/unbundle/crc32c.gn b/build/linux/unbundle/crc32c.gn new file mode 100644 index 000000000000..23f2292d9d49 --- /dev/null +++ b/build/linux/unbundle/crc32c.gn @@ -0,0 +1,11 @@ +import("//build/shim_headers.gni") + +shim_headers("crc32c_shim") { + root_path = "src/include" + headers = [ "crc32c/crc32c.h" ] +} + +source_set("crc32c") { + deps = [ ":crc32c_shim" ] + libs = [ "crc32c" ] +} diff --git a/build/linux/unbundle/dav1d.gn b/build/linux/unbundle/dav1d.gn new file mode 100644 index 000000000000..3d65158bc614 --- /dev/null +++ b/build/linux/unbundle/dav1d.gn @@ -0,0 +1,23 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_dav1d") { + packages = [ "dav1d" ] +} + +shim_headers("dav1d_shim") { + root_path = "libdav1d/include" + headers = [ + "dav1d/common.h", + "dav1d/data.h", + "dav1d/dav1d.h", + "dav1d/headers.h", + "dav1d/picture.h", + 
"dav1d/version.h", + ] +} + +source_set("dav1d") { + deps = [ ":dav1d_shim" ] + public_configs = [ ":system_dav1d" ] +} diff --git a/build/linux/unbundle/double-conversion.gn b/build/linux/unbundle/double-conversion.gn new file mode 100644 index 000000000000..8f970c5a2d5c --- /dev/null +++ b/build/linux/unbundle/double-conversion.gn @@ -0,0 +1,23 @@ +import("//build/shim_headers.gni") + +shim_headers("double_conversion_shim") { + root_path = "." + headers = [ + "double-conversion/bignum.h", + "double-conversion/cached-powers.h", + "double-conversion/diy-fp.h", + "double-conversion/double-conversion.h", + "double-conversion/double-to-string.h", + "double-conversion/fast-dtoa.h", + "double-conversion/fixed-dtoa.h", + "double-conversion/ieee.h", + "double-conversion/string-to-double.h", + "double-conversion/strtod.h", + "double-conversion/utils.h", + ] +} + +source_set("double_conversion") { + deps = [ ":double_conversion_shim" ] + libs = [ "double-conversion" ] +} diff --git a/build/linux/unbundle/ffmpeg.gn b/build/linux/unbundle/ffmpeg.gn index 978298a7f291..0008275dade8 100644 --- a/build/linux/unbundle/ffmpeg.gn +++ b/build/linux/unbundle/ffmpeg.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -23,6 +23,7 @@ shim_headers("ffmpeg_shim") { root_path = "." headers = [ "libavcodec/avcodec.h", + "libavcodec/packet.h", "libavformat/avformat.h", "libavutil/imgutils.h", ] diff --git a/build/linux/unbundle/flac.gn b/build/linux/unbundle/flac.gn index ced81d655083..ce9a1b3778bb 100644 --- a/build/linux/unbundle/flac.gn +++ b/build/linux/unbundle/flac.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/fontconfig.gn b/build/linux/unbundle/fontconfig.gn index 49236c9c8ace..87926d59fdbc 100644 --- a/build/linux/unbundle/fontconfig.gn +++ b/build/linux/unbundle/fontconfig.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/freetype.gn b/build/linux/unbundle/freetype.gn index cafa9db6b7fb..73f9666833e5 100644 --- a/build/linux/unbundle/freetype.gn +++ b/build/linux/unbundle/freetype.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/harfbuzz-ng.gn b/build/linux/unbundle/harfbuzz-ng.gn index b4ba17a9b833..604272d937ae 100644 --- a/build/linux/unbundle/harfbuzz-ng.gn +++ b/build/linux/unbundle/harfbuzz-ng.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/icu.gn b/build/linux/unbundle/icu.gn index 6f3f8438bd85..366ffd5a3597 100644 --- a/build/linux/unbundle/icu.gn +++ b/build/linux/unbundle/icu.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. 
+# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/jsoncpp.gn b/build/linux/unbundle/jsoncpp.gn new file mode 100644 index 000000000000..e84a0ef27a04 --- /dev/null +++ b/build/linux/unbundle/jsoncpp.gn @@ -0,0 +1,32 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("jsoncpp_config") { + packages = [ "jsoncpp" ] + + # Defining JSON_DLL_BUILD applies public visibility to jsoncpp classes + # thus deactivating CFI checks for them. This avoids CFI violations in + # virtual calls to system jsoncpp library (https://crbug.com/1365218). + defines = [ "JSON_DLL_BUILD" ] +} + +shim_headers("jsoncpp_shim") { + root_path = "source/include" + headers = [ + "json/allocator.h", + "json/assertions.h", + "json/config.h", + "json/forwards.h", + "json/json.h", + "json/json_features.h", + "json/reader.h", + "json/value.h", + "json/version.h", + "json/writer.h", + ] +} + +source_set("jsoncpp") { + deps = [ ":jsoncpp_shim" ] + public_configs = [ ":jsoncpp_config" ] +} diff --git a/build/linux/unbundle/libXNVCtrl.gn b/build/linux/unbundle/libXNVCtrl.gn new file mode 100644 index 000000000000..0e1265b8c430 --- /dev/null +++ b/build/linux/unbundle/libXNVCtrl.gn @@ -0,0 +1,19 @@ +import("//build/shim_headers.gni") + +shim_headers("libXNVCtrl_shim") { + root_path = "../../../../../third_party/libXNVCtrl" + prefix = "NVCtrl/" + headers = [ + "NVCtrl.h", + "NVCtrlLib.h", + "nv_control.h", + ] +} + +source_set("libXNVCtrl") { + deps = [ ":libXNVCtrl_shim" ] + libs = [ + "XNVCtrl", + "xcb", + ] +} diff --git a/build/linux/unbundle/libaom.gn b/build/linux/unbundle/libaom.gn new file mode 100644 index 000000000000..dab8dfaba753 --- /dev/null +++ b/build/linux/unbundle/libaom.gn @@ -0,0 +1,34 @@ +import("//build/buildflag_header.gni") +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") +import("//third_party/libaom/options.gni") + +buildflag_header("libaom_buildflags") { + header = "libaom_buildflags.h" + flags = [ "ENABLE_LIBAOM=$enable_libaom" ] +} + +pkg_config("system_aom") { + packages = [ "aom" ] +} + +shim_headers("aom_shim") { + root_path = "source/libaom" + headers = [ + "aom/aom.h", + "aom/aom_codec.h", + "aom/aom_decoder.h", + "aom/aom_encoder.h", + "aom/aom_external_partition.h", + "aom/aom_frame_buffer.h", + "aom/aom_image.h", + "aom/aom_integer.h", + "aom/aomcx.h", + "aom/aomdx.h", + ] +} + +source_set("libaom") { + deps = [ ":aom_shim" ] + public_configs = [ ":system_aom" ] +} diff --git a/build/linux/unbundle/libavif.gn b/build/linux/unbundle/libavif.gn new file mode 100644 index 000000000000..c79f95ba45ee --- /dev/null +++ b/build/linux/unbundle/libavif.gn @@ -0,0 +1,16 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_libavif") { + packages = [ "libavif" ] +} + +shim_headers("avif_shim") { + root_path = "src/include" + headers = [ "avif/avif.h" ] +} + +source_set("libavif") { + deps = [ ":avif_shim" ] + public_configs = [ ":system_libavif" ] +} diff --git a/build/linux/unbundle/libdrm.gn b/build/linux/unbundle/libdrm.gn index 30cdcef3efa1..d461b8ca5780 100644 --- a/build/linux/unbundle/libdrm.gn +++ b/build/linux/unbundle/libdrm.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
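Every unbundle file added in this change follows the same two-part pattern: a pkg_config (or bare libs) target points the build at the system copy of the library, while a shim_headers target keeps in-tree #include paths compiling by generating forwarding stubs for the headers that used to be bundled. A simplified Python sketch of the forwarding-header idea; the real generator sits behind //build/shim_headers.gni and handles root_path remapping and GN integration, and the output directory here is illustrative:

    import os

    # For each bundled header path, emit a stub that forwards to the system
    # header, so an in-tree include of absl/types/optional.h still resolves.
    def write_shims(gen_include_dir, prefix, headers):
        for header in headers:
            shim = os.path.join(gen_include_dir, prefix, header)
            os.makedirs(os.path.dirname(shim), exist_ok=True)
            with open(shim, "w") as f:
                f.write("#include <%s%s>\n" % (prefix, header))

    write_shims("gen/shim_headers/optional_shim", "absl/types/", ["optional.h"])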
diff --git a/build/linux/unbundle/libevent.gn b/build/linux/unbundle/libevent.gn index 7e1c34d3370d..2eb6d0223805 100644 --- a/build/linux/unbundle/libevent.gn +++ b/build/linux/unbundle/libevent.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/libjpeg.gn b/build/linux/unbundle/libjpeg.gn index 17398ea60b6f..3236fb802c44 100644 --- a/build/linux/unbundle/libjpeg.gn +++ b/build/linux/unbundle/libjpeg.gn @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/libpng.gn b/build/linux/unbundle/libpng.gn index 1d6590dcae64..91e0ee45da63 100644 --- a/build/linux/unbundle/libpng.gn +++ b/build/linux/unbundle/libpng.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/libvpx.gn b/build/linux/unbundle/libvpx.gn index 87be30d17056..8b7bb5e7c1a9 100644 --- a/build/linux/unbundle/libvpx.gn +++ b/build/linux/unbundle/libvpx.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/libwebp.gn b/build/linux/unbundle/libwebp.gn index de0c23031898..708cc9c2bb8e 100644 --- a/build/linux/unbundle/libwebp.gn +++ b/build/linux/unbundle/libwebp.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -14,7 +14,7 @@ pkg_config("system_libwebp") { } shim_headers("libwebp_shim") { - root_path = "src" + root_path = "src/src" headers = [ "webp/decode.h", "webp/demux.h", diff --git a/build/linux/unbundle/libxml.gn b/build/linux/unbundle/libxml.gn index 3587881eea89..8567c154f828 100644 --- a/build/linux/unbundle/libxml.gn +++ b/build/linux/unbundle/libxml.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -12,6 +12,7 @@ source_set("libxml") { public_configs = [ ":system_libxml" ] } +# Please keep in sync with //third_party/libxml/BUILD.gn. static_library("libxml_utils") { # Do not expand this visibility list without first consulting with the # Security Team. @@ -19,7 +20,8 @@ static_library("libxml_utils") { ":xml_reader", ":xml_writer", "//base/test:test_support", - "//services/data_decoder:xml_parser_fuzzer", + "//services/data_decoder:lib", + "//services/data_decoder:xml_parser_fuzzer_deps", ] sources = [ "chromium/libxml_utils.cc", @@ -28,6 +30,7 @@ static_library("libxml_utils") { public_configs = [ ":system_libxml" ] } +# Please keep in sync with //third_party/libxml/BUILD.gn. static_library("xml_reader") { # Do not expand this visibility list without first consulting with the # Security Team. 
@@ -35,7 +38,6 @@ static_library("xml_reader") { "//base/test:test_support", "//components/policy/core/common:unit_tests", "//services/data_decoder:*", - "//tools/traffic_annotation/auditor:auditor_sources", ] sources = [ "chromium/xml_reader.cc", @@ -44,6 +46,7 @@ static_library("xml_reader") { deps = [ ":libxml_utils" ] } +# Please keep in sync with //third_party/libxml/BUILD.gn. static_library("xml_writer") { # The XmlWriter is considered safe to use from any target. visibility = [ "*" ] diff --git a/build/linux/unbundle/libxslt.gn b/build/linux/unbundle/libxslt.gn index 885574ef89f8..787796063a28 100644 --- a/build/linux/unbundle/libxslt.gn +++ b/build/linux/unbundle/libxslt.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/libyuv.gn b/build/linux/unbundle/libyuv.gn new file mode 100644 index 000000000000..a3363e45eebe --- /dev/null +++ b/build/linux/unbundle/libyuv.gn @@ -0,0 +1,37 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_yuv") { + packages = [ "libyuv" ] +} + +shim_headers("libyuv_shim") { + root_path = "include" + headers = [ + "libyuv.h", + "libyuv/basic_types.h", + "libyuv/compare.h", + "libyuv/convert.h", + "libyuv/convert_argb.h", + "libyuv/convert_from.h", + "libyuv/convert_from_argb.h", + "libyuv/cpu_id.h", + "libyuv/mjpeg_decoder.h", + "libyuv/planar_functions.h", + "libyuv/rotate.h", + "libyuv/rotate_argb.h", + "libyuv/rotate_row.h", + "libyuv/row.h", + "libyuv/scale.h", + "libyuv/scale_argb.h", + "libyuv/scale_row.h", + "libyuv/scale_uv.h", + "libyuv/version.h", + "libyuv/video_common.h", + ] +} + +source_set("libyuv") { + deps = [ ":libyuv_shim" ] + public_configs = [ ":system_yuv" ] +} diff --git a/build/linux/unbundle/openh264.gn b/build/linux/unbundle/openh264.gn index 882e0a8223bf..f4abd9b5f7b7 100644 --- a/build/linux/unbundle/openh264.gn +++ b/build/linux/unbundle/openh264.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -11,7 +11,7 @@ pkg_config("config") { shim_headers("openh264_shim") { prefix = "wels/" - root_path = "src/codec/api/svc" + root_path = "src/codec/api/wels" headers = [ "codec_api.h", "codec_app_def.h", diff --git a/build/linux/unbundle/opus.gn b/build/linux/unbundle/opus.gn index 504d7d27f0f7..b1a199a53240 100644 --- a/build/linux/unbundle/opus.gn +++ b/build/linux/unbundle/opus.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/re2.gn b/build/linux/unbundle/re2.gn index 824d77881314..d8e4d79b182f 100644 --- a/build/linux/unbundle/re2.gn +++ b/build/linux/unbundle/re2.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
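The shims above are driven by the two helper scripts whose diffs follow: remove_bundled_libraries.py prunes bundled third-party sources down to a keep list, and replace_gn_files.py swaps the corresponding BUILD.gn files for the unbundle versions. A rough sketch of the pruning idea (illustration only; the real script also preserves build metadata and defaults to a dry run, and the keep list here is hypothetical):

    import os

    # Walk the tree and report everything outside the keep list, roughly what
    # remove_bundled_libraries.py automates for distro packagers.
    def prune(root, keep_paths):
        for dirpath, dirnames, filenames in os.walk(root, topdown=True):
            rel = os.path.relpath(dirpath, root)
            if any(rel == k or rel.startswith(k + os.sep) for k in keep_paths):
                dirnames[:] = []  # kept subtree: do not descend further
                continue
            for name in filenames:
                print("would remove", os.path.join(dirpath, name))

    prune("third_party", ["opus", "re2"])  # hypothetical keep list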
diff --git a/build/linux/unbundle/remove_bundled_libraries.py b/build/linux/unbundle/remove_bundled_libraries.py index 899877a1654e..43050eee7d6b 100755 --- a/build/linux/unbundle/remove_bundled_libraries.py +++ b/build/linux/unbundle/remove_bundled_libraries.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2013 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,8 +9,6 @@ See README for more details. """ -from __future__ import print_function - import optparse import os.path import sys diff --git a/build/linux/unbundle/replace_gn_files.py b/build/linux/unbundle/replace_gn_files.py index eba4bd1fb3c8..0483cd6921f0 100755 --- a/build/linux/unbundle/replace_gn_files.py +++ b/build/linux/unbundle/replace_gn_files.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2016 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,8 +8,6 @@ make the build use system libraries. """ -from __future__ import print_function - import argparse import os import shutil @@ -17,24 +15,64 @@ REPLACEMENTS = { + # Use system libabsl_2xxx. These 20 shims MUST be used together. + 'absl_algorithm': 'third_party/abseil-cpp/absl/algorithm/BUILD.gn', + 'absl_base': 'third_party/abseil-cpp/absl/base/BUILD.gn', + 'absl_cleanup': 'third_party/abseil-cpp/absl/cleanup/BUILD.gn', + 'absl_container': 'third_party/abseil-cpp/absl/container/BUILD.gn', + 'absl_debugging': 'third_party/abseil-cpp/absl/debugging/BUILD.gn', + 'absl_flags': 'third_party/abseil-cpp/absl/flags/BUILD.gn', + 'absl_functional': 'third_party/abseil-cpp/absl/functional/BUILD.gn', + 'absl_hash': 'third_party/abseil-cpp/absl/hash/BUILD.gn', + 'absl_log': 'third_party/abseil-cpp/absl/log/BUILD.gn', + 'absl_log_internal': 'third_party/abseil-cpp/absl/log/internal/BUILD.gn', + 'absl_memory': 'third_party/abseil-cpp/absl/memory/BUILD.gn', + 'absl_meta': 'third_party/abseil-cpp/absl/meta/BUILD.gn', + 'absl_numeric': 'third_party/abseil-cpp/absl/numeric/BUILD.gn', + 'absl_random': 'third_party/abseil-cpp/absl/random/BUILD.gn', + 'absl_status': 'third_party/abseil-cpp/absl/status/BUILD.gn', + 'absl_strings': 'third_party/abseil-cpp/absl/strings/BUILD.gn', + 'absl_synchronization': 'third_party/abseil-cpp/absl/synchronization/BUILD.gn', + 'absl_time': 'third_party/abseil-cpp/absl/time/BUILD.gn', + 'absl_types': 'third_party/abseil-cpp/absl/types/BUILD.gn', + 'absl_utility': 'third_party/abseil-cpp/absl/utility/BUILD.gn', + # + 'brotli': 'third_party/brotli/BUILD.gn', + 'crc32c': 'third_party/crc32c/BUILD.gn', + 'dav1d': 'third_party/dav1d/BUILD.gn', + 'double-conversion': 'base/third_party/double_conversion/BUILD.gn', 'ffmpeg': 'third_party/ffmpeg/BUILD.gn', 'flac': 'third_party/flac/BUILD.gn', 'fontconfig': 'third_party/fontconfig/BUILD.gn', 'freetype': 'build/config/freetype/freetype.gni', 'harfbuzz-ng': 'third_party/harfbuzz-ng/harfbuzz.gni', 'icu': 'third_party/icu/BUILD.gn', + 'jsoncpp' : 'third_party/jsoncpp/BUILD.gn', + 'libaom' : 'third_party/libaom/BUILD.gn', + 'libavif' : 'third_party/libavif/BUILD.gn', 'libdrm': 'third_party/libdrm/BUILD.gn', - 'libevent': 'base/third_party/libevent/BUILD.gn', + 'libevent': 'third_party/libevent/BUILD.gn', 'libjpeg': 'third_party/libjpeg.gni', 'libpng': 'third_party/libpng/BUILD.gn', 'libvpx': 
'third_party/libvpx/BUILD.gn', 'libwebp': 'third_party/libwebp/BUILD.gn', 'libxml': 'third_party/libxml/BUILD.gn', + 'libXNVCtrl' : 'third_party/angle/src/third_party/libXNVCtrl/BUILD.gn', 'libxslt': 'third_party/libxslt/BUILD.gn', + 'libyuv' : 'third_party/libyuv/BUILD.gn', 'openh264': 'third_party/openh264/BUILD.gn', 'opus': 'third_party/opus/BUILD.gn', 're2': 'third_party/re2/BUILD.gn', 'snappy': 'third_party/snappy/BUILD.gn', + # Use system libSPIRV-Tools in Swiftshader. These two shims MUST be used together. + 'swiftshader-SPIRV-Headers' : 'third_party/swiftshader/third_party/SPIRV-Headers/BUILD.gn', + 'swiftshader-SPIRV-Tools' : 'third_party/swiftshader/third_party/SPIRV-Tools/BUILD.gn', + # Use system libSPIRV-Tools inside ANGLE. These two shims MUST be used together + # and can only be used if WebGPU is not compiled (use_dawn=false) + 'vulkan-SPIRV-Headers' : 'third_party/vulkan-deps/spirv-headers/src/BUILD.gn', + 'vulkan-SPIRV-Tools' : 'third_party/vulkan-deps/spirv-tools/src/BUILD.gn', + # + 'woff2': 'third_party/woff2/BUILD.gn', 'zlib': 'third_party/zlib/BUILD.gn', } diff --git a/build/linux/unbundle/snappy.gn b/build/linux/unbundle/snappy.gn index 966666f800a3..dea0b5543d8d 100644 --- a/build/linux/unbundle/snappy.gn +++ b/build/linux/unbundle/snappy.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/linux/unbundle/swiftshader-SPIRV-Headers.gn b/build/linux/unbundle/swiftshader-SPIRV-Headers.gn new file mode 100644 index 000000000000..24f79de1e46c --- /dev/null +++ b/build/linux/unbundle/swiftshader-SPIRV-Headers.gn @@ -0,0 +1,17 @@ +import("//build/shim_headers.gni") + +shim_headers("SPIRV-Headers_shim") { + root_path = "../../../../third_party/SPIRV-Headers/include" + headers = [ + "spirv/unified1/GLSL.std.450.h", + "spirv/unified1/NonSemanticClspvReflection.h", + "spirv/unified1/NonSemanticDebugPrintf.h", + "spirv/unified1/OpenCL.std.h", + "spirv/unified1/spirv.h", + "spirv/unified1/spirv.hpp", + ] +} + +source_set("spv_headers") { + deps = [ ":SPIRV-Headers_shim" ] +} diff --git a/build/linux/unbundle/swiftshader-SPIRV-Tools.gn b/build/linux/unbundle/swiftshader-SPIRV-Tools.gn new file mode 100644 index 000000000000..eb9d9224ec01 --- /dev/null +++ b/build/linux/unbundle/swiftshader-SPIRV-Tools.gn @@ -0,0 +1,32 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_SPIRV-Tools") { + packages = [ "SPIRV-Tools" ] +} + +shim_headers("SPIRV-Tools_shim") { + root_path = "../../../../third_party/SPIRV-Tools/include" + headers = [ + "spirv-tools/instrument.hpp", + "spirv-tools/libspirv.h", + "spirv-tools/libspirv.hpp", + "spirv-tools/linker.hpp", + "spirv-tools/optimizer.hpp", + ] +} + +source_set("spvtools_headers") { + deps = [ ":SPIRV-Tools_shim" ] + public_configs = [ ":system_SPIRV-Tools" ] +} + +source_set("spvtools_opt") { + deps = [ ":SPIRV-Tools_shim" ] + public_configs = [ ":system_SPIRV-Tools" ] +} + +source_set("spvtools_val") { + deps = [ ":SPIRV-Tools_shim" ] + public_configs = [ ":system_SPIRV-Tools" ] +} diff --git a/build/linux/unbundle/vulkan-SPIRV-Headers.gn b/build/linux/unbundle/vulkan-SPIRV-Headers.gn new file mode 100644 index 000000000000..eb2495ce2712 --- /dev/null +++ b/build/linux/unbundle/vulkan-SPIRV-Headers.gn @@ -0,0 +1,19 @@ +# This shim can only be used if you build Chromium without DAWN + 
+import("//build/shim_headers.gni") + +shim_headers("vulkan-SPIRV-Headers_shim") { + root_path = "include" + headers = [ + "spirv/unified1/GLSL.std.450.h", + "spirv/unified1/NonSemanticClspvReflection.h", + "spirv/unified1/NonSemanticDebugPrintf.h", + "spirv/unified1/OpenCL.std.h", + "spirv/unified1/spirv.h", + "spirv/unified1/spirv.hpp", + ] +} + +source_set("spv_headers") { + deps = [ ":vulkan-SPIRV-Headers_shim" ] +} diff --git a/build/linux/unbundle/vulkan-SPIRV-Tools.gn b/build/linux/unbundle/vulkan-SPIRV-Tools.gn new file mode 100644 index 000000000000..a65c64c6193a --- /dev/null +++ b/build/linux/unbundle/vulkan-SPIRV-Tools.gn @@ -0,0 +1,69 @@ +# This shim can only be used if you build Chromium without DAWN + +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("spvtools_internal_config") { + packages = [ "SPIRV-Tools" ] +} + +shim_headers("vulkan-SPIRV-Tools_shim") { + root_path = "include" + headers = [ + "spirv-tools/instrument.hpp", + "spirv-tools/libspirv.h", + "spirv-tools/libspirv.hpp", + "spirv-tools/linker.hpp", + "spirv-tools/optimizer.hpp", + ] +} + +source_set("SPIRV-Tools") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_core_enums_unified1") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_core_tables_unified1") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_headers") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_language_header_cldebuginfo100") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_language_header_debuginfo") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_language_header_vkdebuginfo100") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_opt") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_val") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} diff --git a/build/linux/unbundle/woff2.gn b/build/linux/unbundle/woff2.gn new file mode 100644 index 000000000000..e7bae10fdc1d --- /dev/null +++ b/build/linux/unbundle/woff2.gn @@ -0,0 +1,20 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_woff2") { + packages = [ "libwoff2dec" ] +} + +shim_headers("woff2_shim") { + root_path = "include" + headers = [ + "woff2/decode.h", + "woff2/encode.h", + "woff2/output.h", + ] +} + +source_set("woff2_dec") { + deps = [ ":woff2_shim" ] + public_configs = [ ":system_woff2" ] +} diff --git a/build/linux/unbundle/zlib.gn b/build/linux/unbundle/zlib.gn index 97d6aebe6d53..2019a4064a84 100644 --- a/build/linux/unbundle/zlib.gn +++ b/build/linux/unbundle/zlib.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
diff --git a/build/locale_tool.py b/build/locale_tool.py
index cad51908f667..c9fd395b4868 100755
--- a/build/locale_tool.py
+++ b/build/locale_tool.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env vpython
-# Copyright 2019 The Chromium Authors. All rights reserved.
+#!/usr/bin/env vpython3
+# Copyright 2019 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -28,7 +28,6 @@ trying to fix it too, but at least the file will not be modified.
 """
 
-from __future__ import print_function
 
 import argparse
 import json
@@ -322,10 +321,9 @@ def test_trailing_comma(self):
 ##########################################################################
 
 # Various lists of locales that will be extracted from build/config/locales.gni
-# Do not use these directly, use ChromeLocales(), AndroidAPKOmittedLocales() and
-# IosUnsupportedLocales() instead to access these lists.
+# Do not use these directly, use ChromeLocales() and IosUnsupportedLocales()
+# instead to access these lists.
 _INTERNAL_CHROME_LOCALES = []
-_INTERNAL_ANDROID_APK_OMITTED_LOCALES = []
 _INTERNAL_IOS_UNSUPPORTED_LOCALES = []
 
@@ -336,13 +334,6 @@ def ChromeLocales():
   return _INTERNAL_CHROME_LOCALES
 
 
-def AndroidAPKOmittedLocales():
-  """Return the list of locales omitted from Android APKs."""
-  if not _INTERNAL_ANDROID_APK_OMITTED_LOCALES:
-    _ExtractAllChromeLocalesLists()
-  return _INTERNAL_ANDROID_APK_OMITTED_LOCALES
-
-
 def IosUnsupportedLocales():
   """Return the list of locales that are unsupported on iOS."""
   if not _INTERNAL_IOS_UNSUPPORTED_LOCALES:
@@ -404,9 +395,6 @@ def _PrepareTinyGnWorkspace(work_dir, out_subdir_name='out'):
 # Write the locales lists to files in the output directory.
 _filename = root_build_dir + "/foo"
 write_file(_filename + ".locales", locales, "json")
-write_file(_filename + ".android_apk_omitted_locales",
-           android_apk_omitted_locales,
-           "json")
 write_file(_filename + ".ios_unsupported_locales",
            ios_unsupported_locales,
            "json")
@@ -461,10 +449,6 @@ def _ExtractAllChromeLocalesLists():
   _INTERNAL_CHROME_LOCALES = _ReadJsonList(
       os.path.join(out_path, 'foo.locales'))
 
-  global _INTERNAL_ANDROID_APK_OMITTED_LOCALES
-  _INTERNAL_ANDROID_APK_OMITTED_LOCALES = _ReadJsonList(
-      os.path.join(out_path, 'foo.android_apk_omitted_locales'))
-
   global _INTERNAL_IOS_UNSUPPORTED_LOCALES
   _INTERNAL_IOS_UNSUPPORTED_LOCALES = _ReadJsonList(
       os.path.join(out_path, 'foo.ios_unsupported_locales'))
@@ -522,7 +506,7 @@ class _GetXmlLangAttributeTest(unittest.TestCase):
   }
 
   def test_GetXmlLangAttribute(self):
-    for test_line, expected in self.TEST_DATA.iteritems():
+    for test_line, expected in self.TEST_DATA.items():
       self.assertEquals(_GetXmlLangAttribute(test_line), expected)
 
 
@@ -1286,9 +1270,8 @@ class _ListLocalesCommand(_Command):
   description = 'List supported Chrome locales'
   long_description = r'''
 List locales of interest. By default this prints all locales supported by
-Chrome, but `--type=android_apk_omitted` can be used to print the list of
-locales omitted from Android APKs (but not app bundles), and
-`--type=ios_unsupported` for the list of locales unsupported on iOS.
+Chrome, but `--type=ios_unsupported` can be used for the list of locales
+unsupported on iOS.
 
 These values are extracted directly from build/config/locales.gni.
 
@@ -1299,7 +1282,6 @@ class _ListLocalesCommand(_Command):
 
   # Maps type argument to a function returning the corresponding locales list.
  TYPE_MAP = {
      'all': ChromeLocales,
-      'android_apk_omitted': AndroidAPKOmittedLocales,
      'ios_unsupported': IosUnsupportedLocales,
  }
diff --git a/build/mac/find_sdk.py b/build/mac/find_sdk.py
index d86f3109357a..3dcc4d5d36bf 100755
--- a/build/mac/find_sdk.py
+++ b/build/mac/find_sdk.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2012 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 r"""Prints the lowest locally available SDK version greater than or equal to a
@@ -21,9 +21,9 @@
     10.14
 """
 
-from __future__ import print_function
 
 import os
+import plistlib
 import re
 import subprocess
 import sys
@@ -51,6 +51,9 @@ def main():
   parser.add_option("--print_bin_path",
                     action="store_true", dest="print_bin_path", default=False,
                     help="Additionally print the path to the toolchain bin dir.")
+  parser.add_option("--print_sdk_build",
+                    action="store_true", dest="print_sdk_build", default=False,
+                    help="Additionally print the build version of the SDK.")
   options, args = parser.parse_args()
   if len(args) != 1:
     parser.error('Please specify a minimum SDK version')
@@ -80,20 +83,30 @@ def main():
   if not sdks:
     raise Exception('No %s+ SDK found' % min_sdk_version)
   best_sdk = sorted(sdks, key=parse_version)[0]
+  sdk_name = 'MacOSX' + best_sdk + '.sdk'
+  sdk_path = os.path.join(sdk_dir, sdk_name)
 
   if options.print_sdk_path:
-    sdk_name = 'MacOSX' + best_sdk + '.sdk'
-    print(os.path.join(sdk_dir, sdk_name))
+    print(sdk_path)
 
   if options.print_bin_path:
     bin_path = 'Toolchains/XcodeDefault.xctoolchain/usr/bin/'
     print(os.path.join(dev_dir, bin_path))
 
-  return best_sdk
+  if options.print_sdk_build:
+    system_version_plist = os.path.join(sdk_path,
+        'System/Library/CoreServices/SystemVersion.plist')
+    with open(system_version_plist, 'rb') as f:
+      system_version_info = plistlib.load(f)
+      if 'ProductBuildVersion' not in system_version_info:
+        raise Exception('Failed to determine ProductBuildVersion ' +
+                        'for SDK at path %s' % system_version_plist)
+      print(system_version_info['ProductBuildVersion'])
+
+  print(best_sdk)
 
 
 if __name__ == '__main__':
   if sys.platform != 'darwin':
     raise Exception("This script only runs on Mac")
-  print(main())
-  sys.exit(0)
+  sys.exit(main())
diff --git a/build/mac/should_use_hermetic_xcode.py b/build/mac/should_use_hermetic_xcode.py
index 63d44af85c79..e4cea4a5a1a6 100755
--- a/build/mac/should_use_hermetic_xcode.py
+++ b/build/mac/should_use_hermetic_xcode.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -15,7 +15,6 @@
   python should_use_hermetic_xcode.py
 """
 
-from __future__ import print_function
 
 import argparse
 import os
diff --git a/build/mac_toolchain.py b/build/mac_toolchain.py
index 43d336cf2f0b..cd253cd7923b 100755
--- a/build/mac_toolchain.py
+++ b/build/mac_toolchain.py
@@ -1,6 +1,6 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -18,8 +18,6 @@ the full revision, e.g. 9A235.
""" -from __future__ import print_function - import argparse import os import pkg_resources @@ -32,20 +30,25 @@ def LoadPList(path): """Loads Plist at |path| and returns it as a dictionary.""" - if sys.version_info.major == 2: - return plistlib.readPlist(path) with open(path, 'rb') as f: return plistlib.load(f) -# This contains binaries from Xcode 12.4 12D4e, along with the macOS 11 SDK. -# To build these packages, see comments in build/xcode_binaries.yaml +# This contains binaries from Xcode 14.3 14E222b along with the macOS 13.3 SDK +# (13.3 22E245). To build these packages, see comments in +# build/xcode_binaries.yaml +# To update the version numbers, open Xcode's "About Xcode" for the first number +# and run `xcrun --show-sdk-build-version` for the second. +# To update the _TAG, use the output of the `cipd create` command mentioned in +# xcode_binaries.yaml. + MAC_BINARIES_LABEL = 'infra_internal/ios/xcode/xcode_binaries/mac-amd64' -MAC_BINARIES_TAG = 'Za4aUIwiTUjk8rnjRow4nXbth-j7ZoN5plyOSCLidcgC' +MAC_BINARIES_TAG = 'ajH0-Cuzzqtyj98qUlsgO1-lepRhXoVVNAjVXDIYHxcC' # The toolchain will not be downloaded if the minimum OS version is not met. 19 -# is the major version number for macOS 10.15. 12B5044c (Xcode 12.2rc) only runs -# on 10.15.4 and newer. +# is the major version number for macOS 10.15. Xcode 14.0 14B47b only runs on +# macOS 12.4 and newer, but some bots are still running older OS versions. macOS +# 10.15.4, the OS minimum through Xcode 12.4, still seems to work. MAC_MINIMUM_OS_VERSION = [19, 4] BASE_DIR = os.path.abspath(os.path.dirname(__file__)) @@ -161,11 +164,10 @@ def InstallXcodeBinaries(): return 0 # Use puppet's sudoers script to accept the license if its available. - license_accept_script = '/usr/local/bin/xcode_accept_license.py' + license_accept_script = '/usr/local/bin/xcode_accept_license.sh' if os.path.exists(license_accept_script): args = [ - 'sudo', license_accept_script, '--xcode-version', cipd_xcode_version, - '--license-version', cipd_license_version + 'sudo', license_accept_script, cipd_xcode_version, cipd_license_version ] subprocess.check_call(args) return 0 diff --git a/build/lacros/metadata.json.in b/build/metadata.json.in similarity index 100% rename from build/lacros/metadata.json.in rename to build/metadata.json.in diff --git a/build/nocompile.gni b/build/nocompile.gni index 4319312573c8..942ad9ecef46 100644 --- a/build/nocompile.gni +++ b/build/nocompile.gni @@ -1,4 +1,4 @@ -# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Copyright 2011 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -63,6 +63,10 @@ import("//build/config/python.gni") import("//build/toolchain/toolchain.gni") import("//testing/test.gni") +if (is_mac) { + import("//build/config/mac/mac_sdk.gni") +} + declare_args() { # TODO(crbug.com/105388): make sure no-compile test is not flaky. 
 enable_nocompile_tests = (is_linux || is_chromeos || is_apple) && is_clang &&
@@ -96,24 +100,25 @@ if (enable_nocompile_tests) {
       "-nostdinc++",
       "-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir),
       "-isystem" + rebase_path("$libcxxabi_prefix/include", root_build_dir),
-      "-std=c++14",
+      "-std=c++17",
       "-Wall",
       "-Werror",
       "-Wfatal-errors",
       "-Wthread-safety",
       "-I" + rebase_path("//", root_build_dir),
       "-I" + rebase_path("//third_party/abseil-cpp/", root_build_dir),
+      "-I" + rebase_path("//buildtools/third_party/libc++/", root_build_dir),
       "-I" + rebase_path(root_gen_dir, root_build_dir),
 
       # TODO(https://crbug.com/989932): Track build/config/compiler/BUILD.gn
       "-Wno-implicit-int-float-conversion",
-
-      # TODO(crbug.com/1166707): libc++ now requires this macro to be defined.
-      "-D_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS",
     ]
 
-    if (is_apple && host_os != "mac") {
-      args += [ "--target=x86_64-apple-macos" ]
+    if (is_mac && host_os != "mac") {
+      args += [
+        "--target=x86_64-apple-macos",
+        "-mmacos-version-min=$mac_deployment_target",
+      ]
     }
 
     # Iterate over any extra include dirs and append them to the command line.
@@ -129,6 +134,13 @@ if (enable_nocompile_tests) {
         rebase_path(sysroot, root_build_dir),
       ]
     }
+
+    if (!is_nacl) {
+      args += [
+        # TODO(crbug.com/1343975) Evaluate and possibly enable.
+        "-Wno-deprecated-builtins",
+      ]
+    }
   }
 
   test(target_name) {
diff --git a/build/noop.py b/build/noop.py
index cf4d3ae305d2..6c7477591879 100644
--- a/build/noop.py
+++ b/build/noop.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 """Script that does nothing successfully."""
diff --git a/build/partitioned_shared_library.gni b/build/partitioned_shared_library.gni
index 2ea32ce7289e..2af4f9e93b5d 100644
--- a/build/partitioned_shared_library.gni
+++ b/build/partitioned_shared_library.gni
@@ -1,4 +1,4 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -62,7 +62,7 @@ template("partitioned_shared_library") {
     }
     ldflags += [
       "-Wl,-soname,lib${_output_name}.so",
-      "--link-only",
+      "--partitioned-library",
    ]
 
    # This shared library is an intermediate artifact that should not be packaged
@@ -101,8 +101,7 @@ template("partitioned_shared_library") {
    }
 
    if (use_debug_fission) {
-      _dwp = rebase_path("${android_tool_prefix}dwp", root_build_dir)
-      args += [ "--dwp=${_dwp}" ]
+      args += [ "--split-dwarf" ]
      outputs += [ invoker.unstripped_output + ".dwp" ]
    }
    args += [ rebase_path(sources[0], root_build_dir) ]
diff --git a/build/precompile.cc b/build/precompile.cc
index db1ef6dfe551..8ae429349cc3 100644
--- a/build/precompile.cc
+++ b/build/precompile.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Copyright 2011 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/build/precompile.h b/build/precompile.h
index c699562c0c3f..d6e3dc11a31c 100644
--- a/build/precompile.h
+++ b/build/precompile.h
@@ -1,4 +1,4 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Copyright 2012 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
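Stepping back from the hunks above: the `--print_sdk_build` option added to build/mac/find_sdk.py earlier in this patch reduces to a few lines of plistlib, since every macOS SDK carries a SystemVersion.plist whose ProductBuildVersion is the build id the script prints. A standalone sketch of that lookup, assuming a placeholder SDK path (find_sdk.py derives the real one from the selected Xcode) and trimming error handling to the single check the script performs:

```python
import plistlib


def read_sdk_build_version(sdk_path):
  # SystemVersion.plist may be binary or XML; plistlib.load() handles both.
  plist_path = (sdk_path +
                '/System/Library/CoreServices/SystemVersion.plist')
  with open(plist_path, 'rb') as f:
    info = plistlib.load(f)
  if 'ProductBuildVersion' not in info:
    raise Exception('Failed to determine ProductBuildVersion '
                    'for SDK at path %s' % plist_path)
  return info['ProductBuildVersion']


if __name__ == '__main__':
  # Placeholder path for illustration only.
  print(read_sdk_build_version(
      '/Applications/Xcode.app/Contents/Developer'
      '/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk'))
```

For the 13.3 SDK referenced in the mac_toolchain.py comment above, this would print a build id of the form `22E245`.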
diff --git a/build/print_python_deps.py b/build/print_python_deps.py index e56783477a02..07f988a87113 100755 --- a/build/print_python_deps.py +++ b/build/print_python_deps.py @@ -1,5 +1,5 @@ -#!/usr/bin/python2.7 -# Copyright 2016 The Chromium Authors. All rights reserved. +#!/usr/bin/env vpython3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,12 +7,9 @@ The primary use-case for this script is to generate the list of python modules required for .isolate files. - -This script should be compatible with Python 2 and Python 3. """ import argparse -import fnmatch import os import pipes import sys @@ -48,6 +45,13 @@ def ComputePythonDependencies(): return src_paths +def quote(string): + if string.count(' ') > 0: + return '"%s"' % string + else: + return string + + def _NormalizeCommandLine(options): """Returns a string that when run from SRC_ROOT replicates the command.""" args = ['build/print_python_deps.py'] @@ -61,12 +65,14 @@ def _NormalizeCommandLine(options): for allowlist in sorted(options.allowlists): args.extend(('--allowlist', os.path.relpath(allowlist, _SRC_ROOT))) args.append(os.path.relpath(options.module, _SRC_ROOT)) - return ' '.join(pipes.quote(x) for x in args) + if os.name == 'nt': + return ' '.join(quote(x) for x in args).replace('\\', '/') + else: + return ' '.join(pipes.quote(x) for x in args) def _FindPythonInDirectory(directory, allow_test): """Returns an iterable of all non-test python files in the given directory.""" - files = [] for root, _dirnames, filenames in os.walk(directory): for filename in filenames: if filename.endswith('.py') and (allow_test @@ -74,47 +80,19 @@ def _FindPythonInDirectory(directory, allow_test): yield os.path.join(root, filename) -def _GetTargetPythonVersion(module): - """Heuristically determines the target module's Python version.""" - with open(module) as f: - shebang = f.readline().strip() - default_version = 2 - if shebang.startswith('#!'): - # Examples: - # '#!/usr/bin/python' - # '#!/usr/bin/python2.7' - # '#!/usr/bin/python3' - # '#!/usr/bin/env python3' - # '#!/usr/bin/env vpython' - # '#!/usr/bin/env vpython3' - exec_name = os.path.basename(shebang[2:].split(' ')[-1]) - for python_prefix in ['python', 'vpython']: - if exec_name.startswith(python_prefix): - version_string = exec_name[len(python_prefix):] - break - else: - raise ValueError('Invalid shebang: ' + shebang) - if version_string: - return int(float(version_string)) - return default_version - - def _ImportModuleByPath(module_path): """Imports a module by its source file.""" # Replace the path entry for print_python_deps.py with the one for the given # module. sys.path[0] = os.path.dirname(module_path) - if sys.version_info[0] == 2: - import imp # Python 2 only, since it's deprecated in Python 3. - imp.load_source('NAME', module_path) - else: - # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly - module_name = os.path.splitext(os.path.basename(module_path))[0] - import importlib.util # Python 3 only, since it's unavailable in Python 2. 
-    spec = importlib.util.spec_from_file_location(module_name, module_path)
-    module = importlib.util.module_from_spec(spec)
-    sys.modules[module_name] = module
-    spec.loader.exec_module(module)
+
+  # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
+  module_name = os.path.splitext(os.path.basename(module_path))[0]
+  import importlib.util
+  spec = importlib.util.spec_from_file_location(module_name, module_path)
+  module = importlib.util.module_from_spec(spec)
+  sys.modules[module_name] = module
+  spec.loader.exec_module(module)
 
 
 def main():
@@ -158,35 +136,20 @@ def main():
   if not modules:
     parser.error('Input directory does not contain any python files!')
 
-  target_versions = [_GetTargetPythonVersion(m) for m in modules]
-  target_version = target_versions[0]
-  assert target_version in [2, 3]
-  assert all(v == target_version for v in target_versions)
-
-  current_version = sys.version_info[0]
-
-  # Trybots run with vpython as default Python, but with a different config
-  # from //.vpython. To make the is_vpython test work, and to match the behavior
-  # of dev machines, the shebang line must be run with python2.7.
-  #
-  # E.g. $HOME/.vpython-root/dd50d3/bin/python
-  # E.g. /b/s/w/ir/cache/vpython/ab5c79/bin/python
   is_vpython = 'vpython' in sys.executable
-  if not is_vpython or target_version != current_version:
+  if not is_vpython:
     # Prevent infinite relaunch if something goes awry.
     assert not options.did_relaunch
     # Re-launch using vpython will cause us to pick up modules specified in
     # //.vpython, but does not cause it to pick up modules defined inline via
     # [VPYTHON:BEGIN] ... [VPYTHON:END] comments.
     # TODO(agrieve): Add support for this if the need ever arises.
-    vpython_to_use = {2: 'vpython', 3: 'vpython3'}[target_version]
-    os.execvp(vpython_to_use, [vpython_to_use] + sys.argv + ['--did-relaunch'])
+    os.execvp('vpython3', ['vpython3'] + sys.argv + ['--did-relaunch'])
 
-  if current_version == 3:
-    # Work-around for protobuf library not being loadable via importlib
-    # This is needed due to compile_resources.py.
-    import importlib._bootstrap_external
-    importlib._bootstrap_external._NamespacePath.sort = lambda self, **_: 0
+  # Work-around for protobuf library not being loadable via importlib
+  # This is needed due to compile_resources.py.
+  import importlib._bootstrap_external
+  importlib._bootstrap_external._NamespacePath.sort = lambda self, **_: 0
 
   paths_set = set()
   try:
@@ -209,14 +172,14 @@ def main():
   paths = [os.path.relpath(p, options.root) for p in paths_set]
 
   normalized_cmdline = _NormalizeCommandLine(options)
-  out = open(options.output, 'w') if options.output else sys.stdout
+  out = open(options.output, 'w', newline='') if options.output else sys.stdout
   with out:
     if not options.no_header:
       out.write('# Generated by running:\n')
       out.write('# %s\n' % normalized_cmdline)
     prefix = '//' if options.gn_paths else ''
     for path in sorted(paths):
-      out.write(prefix + path + '\n')
+      out.write(prefix + path.replace('\\', '/') + '\n')
 
 
 if __name__ == '__main__':
diff --git a/build/private_code_test/BUILD.gn b/build/private_code_test/BUILD.gn
new file mode 100644
index 000000000000..8fcdd54077a0
--- /dev/null
+++ b/build/private_code_test/BUILD.gn
@@ -0,0 +1,47 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+ +import("//build/config/python.gni") +import("//build_overrides/build.gni") +import("private_code_test.gni") + +action("private_paths") { + script = "list_gclient_deps.py" + outputs = [ "$target_gen_dir/private_paths.txt" ] + args = [ + "--source-filter", + "chrome-internal", + "--output", + rebase_path(outputs[0], root_build_dir), + ] + inputs = [ "//../.gclient_entries" ] +} + +# --collect-inputs-only requires a source_set dep or !is_component_build. +if (!is_component_build) { + action("private_code_test_gclient_deps") { + script = "list_gclient_deps.py" + outputs = [ "$target_gen_dir/test_private_paths.txt" ] + args = [ + "--source-filter", + "v8.git", + "--output", + rebase_path(outputs[0], root_build_dir), + ] + inputs = [ "//../.gclient_entries" ] + } + + shared_library("private_code_test_inputs") { + deps = [ "//v8" ] + ldflags = [ "--collect-inputs-only" ] + } + + # Test that ensures the checker fails when it is supposed to. + private_code_test("private_code_failure_test") { + linker_inputs_dep = ":private_code_test_inputs" + private_paths_dep = ":private_code_test_gclient_deps" + private_paths_file = "$target_gen_dir/test_private_paths.txt" + expect_failure = true + } +} diff --git a/build/private_code_test/README.md b/build/private_code_test/README.md new file mode 100644 index 000000000000..75329b02a39a --- /dev/null +++ b/build/private_code_test/README.md @@ -0,0 +1,36 @@ +# Private Code Test + +This directory provides a mechanism for testing that native does not link in +object files from unwanted directories. The test finds all linker inputs, and +checks that none live inside a list of internal paths. + +Original bug: https://bugs.chromium.org/p/chromium/issues/detail?id=1266989 + +## Determining Internal Directories + +This is done by parsing the `.gclient_entries` file for all paths coming from +https://chrome-internal.googlesource.com. I chose this approach since it is +simple. + +The main alternative I found was to use `gclient flatten`. Example output: + +``` + # src -> src-internal + "src-internal": { + "url": "https://chrome-internal.googlesource.com/chrome/src-internal.git@c649c6a155fe65c3730e2d663d7d2058d33bf1f9", + "condition": 'checkout_src_internal', + }, +``` + +* Paths could be found in this way by looking for `checkout_src_internal` + within `condition`, and by looking for the comment line for `recurse_deps` + that went through an internal repo. + +## Determining Linker Inputs + +This is done by performing a custom link step with a linker that just records +inputs. This seemed like the simplest approach. + +Two alternatives: +1) Dump paths found in debug information. +2) Scan a linker map file for input paths. diff --git a/build/private_code_test/list_gclient_deps.py b/build/private_code_test/list_gclient_deps.py new file mode 100755 index 000000000000..6a34fc4abae4 --- /dev/null +++ b/build/private_code_test/list_gclient_deps.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+
+import argparse
+import os
+import pathlib
+import sys
+
+_REPO_ROOT = pathlib.Path(__file__).resolve().parents[3]
+_ENTRIES_FILE = _REPO_ROOT / '.gclient_entries'
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--source-filter', required=True)
+  parser.add_argument('--output', required=True)
+  args = parser.parse_args()
+
+  source_filter = args.source_filter
+
+  # Ninja validates that the file exists since it's marked as an input.
+  text = ''
+  try:
+    text = _ENTRIES_FILE.read_text()
+    result = {}
+    exec(text, result)
+    entries = result['entries']
+    private_dirs = sorted(d for d, s in entries.items()
+                          if s and source_filter in s)
+  except Exception as e:
+    # Make the test fail rather than the compile step so that failures here do
+    # not prevent other bot functionality.
+    private_dirs = [
+        '# ERROR: parsing .gclient_entries failed',
+        str(e), '', 'File was:', text
+    ]
+
+  pathlib.Path(args.output).write_text('\n'.join(private_dirs) + '\n')
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/private_code_test/private_code_test.gni b/build/private_code_test/private_code_test.gni
new file mode 100644
index 000000000000..6ce82f0328bd
--- /dev/null
+++ b/build/private_code_test/private_code_test.gni
@@ -0,0 +1,63 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//testing/test.gni")
+
+template("private_code_test") {
+  isolated_script_test(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data",
+                             "data_deps",
+                           ])
+    script = "//build/private_code_test/private_code_test.py"
+    _linker_inputs_dep = invoker.linker_inputs_dep
+    _so_name = get_label_info(_linker_inputs_dep, "name")
+    if (shlib_prefix != "") {
+      _so_name = string_replace(shlib_prefix + _so_name,
+                                "${shlib_prefix}${shlib_prefix}",
+                                shlib_prefix)
+    }
+    _dir = get_label_info(_linker_inputs_dep, "root_out_dir")
+    if (is_android) {
+      _dir += "/lib.unstripped"
+    }
+    _linker_inputs_file = "$_dir/${_so_name}$shlib_extension"
+    if (defined(invoker.private_paths_dep)) {
+      _private_paths_dep = invoker.private_paths_dep
+      _private_paths_file = invoker.private_paths_file
+    } else {
+      _private_paths_dep =
+          "//build/private_code_test:private_paths($default_toolchain)"
+      _private_paths_file =
+          get_label_info(_private_paths_dep, "target_gen_dir") +
+          "/private_paths.txt"
+    }
+
+    data_deps = [
+      _linker_inputs_dep,
+      _private_paths_dep,
+    ]
+    args = [
+      "--linker-inputs",
+      "@WrappedPath(" + rebase_path(_linker_inputs_file, root_build_dir) + ")",
+      "--private-paths-file",
+      "@WrappedPath(" + rebase_path(_private_paths_file, root_build_dir) + ")",
+      "--root-out-dir",
+      rebase_path(get_label_info(_linker_inputs_dep, "root_out_dir"),
+                  root_build_dir),
+    ]
+    if (defined(invoker.allowed_violations)) {
+      foreach(_glob, invoker.allowed_violations) {
+        args += [
+          "--allow-violation",
+          _glob,
+        ]
+      }
+    }
+    if (defined(invoker.expect_failure) && invoker.expect_failure) {
+      args += [ "--expect-failure" ]
+    }
+  }
+}
diff --git a/build/private_code_test/private_code_test.py b/build/private_code_test/private_code_test.py
new file mode 100755
index 000000000000..a164741a4b11
--- /dev/null
+++ b/build/private_code_test/private_code_test.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests that no linker inputs are from private paths.""" + +import argparse +import fnmatch +import os +import pathlib +import sys + +_DIR_SRC_ROOT = pathlib.Path(__file__).resolve().parents[2] + + +def _print_paths(paths, limit): + for path in paths[:limit]: + print(path) + if len(paths) > limit: + print(f'... and {len(paths) - limit} more.') + print() + + +def _apply_allowlist(found, globs): + ignored_paths = [] + new_found = [] + for path in found: + for pattern in globs: + if fnmatch.fnmatch(path, pattern): + ignored_paths.append(path) + break + else: + new_found.append(path) + return new_found, ignored_paths + + +def _find_private_paths(linker_inputs, private_paths, root_out_dir): + seen = set() + found = [] + for linker_input in linker_inputs: + dirname = os.path.dirname(linker_input) + if dirname in seen: + continue + + to_check = dirname + # Strip ../ prefix. + if to_check.startswith('..'): + to_check = os.path.relpath(to_check, _DIR_SRC_ROOT) + else: + if root_out_dir: + # Strip secondary toolchain subdir + to_check = to_check[len(root_out_dir) + 1:] + # Strip top-level dir (e.g. "obj", "gen"). + parts = to_check.split(os.path.sep, 1) + if len(parts) == 1: + continue + to_check = parts[1] + + if any(to_check.startswith(p) for p in private_paths): + found.append(linker_input) + else: + seen.add(dirname) + return found + + +def _read_private_paths(path): + text = pathlib.Path(path).read_text() + + # Check if .gclient_entries was not valid. https://crbug.com/1427829 + if text.startswith('# ERROR: '): + sys.stderr.write(text) + sys.exit(1) + + # Remove src/ prefix from paths. + # We care only about paths within src/ since GN cannot reference files + # outside of // (and what would the obj/ path for them look like?). + ret = [p[4:] for p in text.splitlines() if p.startswith('src/')] + if not ret: + sys.stderr.write(f'No src/ paths found in {args.private_paths_file}\n') + sys.stderr.write(f'This test should not be run on public bots.\n') + sys.stderr.write(f'File contents:\n') + sys.stderr.write(text) + sys.exit(1) + + return ret + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--linker-inputs', + required=True, + help='Path to file containing one linker input per line, ' + 'relative to --root-out-dir') + parser.add_argument('--private-paths-file', + required=True, + help='Path to file containing list of paths that are ' + 'considered private, relative gclient root.') + parser.add_argument('--root-out-dir', + required=True, + help='See --linker-inputs.') + parser.add_argument('--allow-violation', + action='append', + help='globs of private paths to allow.') + parser.add_argument('--expect-failure', + action='store_true', + help='Invert exit code.') + args = parser.parse_args() + + private_paths = _read_private_paths(args.private_paths_file) + linker_inputs = pathlib.Path(args.linker_inputs).read_text().splitlines() + + root_out_dir = args.root_out_dir + if root_out_dir == '.': + root_out_dir = '' + + found = _find_private_paths(linker_inputs, private_paths, root_out_dir) + + if args.allow_violation: + found, ignored_paths = _apply_allowlist(found, args.allow_violation) + if ignored_paths: + print('Ignoring {len(ignored_paths)} allowlisted private paths:') + _print_paths(sorted(ignored_paths), 10) + + if found: + limit = 10 if args.expect_failure else 1000 + print(f'Found {len(found)} private paths being linked into public code:') + _print_paths(found, limit) + elif args.expect_failure: + print('Expected to find a private path, but none were found.') + + 
sys.exit(0 if bool(found) == args.expect_failure else 1) + + +if __name__ == '__main__': + main() diff --git a/build/protoc_java.py b/build/protoc_java.py index fe602a9fc9aa..8f25e3a5e6c7 100755 --- a/build/protoc_java.py +++ b/build/protoc_java.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -15,7 +15,6 @@ 4. Creates a new stamp file. """ -from __future__ import print_function import argparse import os @@ -23,6 +22,9 @@ import subprocess import sys +import action_helpers +import zip_helpers + sys.path.append(os.path.join(os.path.dirname(__file__), 'android', 'gyp')) from util import build_utils @@ -42,8 +44,9 @@ def _EnforceJavaPackage(proto_srcs): def main(argv): parser = argparse.ArgumentParser() - build_utils.AddDepfileOption(parser) + action_helpers.add_depfile_arg(parser) parser.add_argument('--protoc', required=True, help='Path to protoc binary.') + parser.add_argument('--plugin', help='Path to plugin executable') parser.add_argument('--proto-path', required=True, help='Path to proto directory.') @@ -65,15 +68,22 @@ def main(argv): _EnforceJavaPackage(options.protos) with build_utils.TempDir() as temp_dir: - out_arg = '--java_out=lite:' + temp_dir + protoc_args = [] + + generator = 'java' + if options.plugin: + generator = 'plugin' + protoc_args += ['--plugin', 'protoc-gen-plugin=' + options.plugin] - proto_path_args = ['--proto_path', options.proto_path] + protoc_args += ['--proto_path', options.proto_path] for path in options.import_dir: - proto_path_args += ["--proto_path", path] + protoc_args += ['--proto_path', path] + + protoc_args += ['--' + generator + '_out=lite:' + temp_dir] # Generate Java files using protoc. build_utils.CheckOutput( - [options.protoc] + proto_path_args + [out_arg] + options.protos, + [options.protoc] + protoc_args + options.protos, # protoc generates superfluous warnings about LITE_RUNTIME deprecation # even though we are using the new non-deprecated method. stderr_filter=lambda output: build_utils.FilterLines( @@ -84,12 +94,13 @@ def main(argv): build_utils.DeleteDirectory(options.java_out_dir) shutil.copytree(temp_dir, options.java_out_dir) else: - build_utils.ZipDir(options.srcjar, temp_dir) + with action_helpers.atomic_output(options.srcjar) as f: + zip_helpers.zip_directory(f, temp_dir) if options.depfile: assert options.srcjar deps = options.protos + [options.protoc] - build_utils.WriteDepfile(options.depfile, options.srcjar, deps) + action_helpers.write_depfile(options.depfile, options.srcjar, deps) if options.stamp: build_utils.Touch(options.stamp) diff --git a/build/protoc_java.pydeps b/build/protoc_java.pydeps index c3ed2be81add..467907f86a8e 100644 --- a/build/protoc_java.pydeps +++ b/build/protoc_java.pydeps @@ -1,6 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build --output build/protoc_java.pydeps build/protoc_java.py +action_helpers.py android/gyp/util/__init__.py android/gyp/util/build_utils.py gn_helpers.py protoc_java.py +zip_helpers.py diff --git a/build/redirect_stdout.py b/build/redirect_stdout.py index 166293cb38aa..16494fa6cf00 100644 --- a/build/redirect_stdout.py +++ b/build/redirect_stdout.py @@ -1,9 +1,9 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. 
+# Copyright 2016 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from __future__ import print_function
+import os
 import subprocess
 import sys
 
@@ -17,5 +17,12 @@
     print("Usage: %s output_file command..." % sys.argv[0], file=sys.stderr)
     sys.exit(1)
 
+  # This script is designed to run binaries produced by the current build. We
+  # may prefix it with "./" to avoid picking up system versions that might
+  # also be on the path.
+  path = sys.argv[2]
+  if not os.path.isabs(path):
+    path = './' + path
+
   with open(sys.argv[1], 'w') as fp:
-    sys.exit(subprocess.check_call(sys.argv[2:], stdout=fp))
+    sys.exit(subprocess.check_call([path] + sys.argv[3:], stdout=fp))
diff --git a/build/rm.py b/build/rm.py
index 43a663d6df15..11e8a6439900 100755
--- a/build/rm.py
+++ b/build/rm.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2016 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,6 @@
 This module works much like the rm posix command.
 """
 
-from __future__ import print_function
 
 import argparse
 import os
diff --git a/build/rust/BUILD.gn b/build/rust/BUILD.gn
new file mode 100644
index 000000000000..d7ae149c265f
--- /dev/null
+++ b/build/rust/BUILD.gn
@@ -0,0 +1,84 @@
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/buildflag_header.gni")
+import("//build/config/rust.gni")
+
+if (toolchain_has_rust) {
+  config("edition_2021") {
+    rustflags = [ "--edition=2021" ]
+  }
+
+  config("edition_2018") {
+    rustflags = [ "--edition=2018" ]
+  }
+
+  config("edition_2015") {
+    rustflags = [ "--edition=2015" ]
+  }
+
+  # The required dependencies for cxx-generated bindings that must be included
+  # on the C++ side.
+  static_library("cxx_cppdeps") {
+    sources = [
+      "//third_party/rust/cxx/v1/crate/include/cxx.h",
+      "//third_party/rust/cxx/v1/crate/src/cxx.cc",
+    ]
+
+    defines = [ "RUST_CXX_NO_EXCEPTIONS" ]
+
+    if (is_win) {
+      defines += [ "CXX_RS_EXPORT=__declspec(dllexport)" ]
+    } else {
+      defines += [ "CXX_RS_EXPORT=__attribute__((visibility(\"default\")))" ]
+    }
+
+    # Depending on the C++ bindings side of cxx then requires also depending
+    # on the Rust bindings, since one calls the other. And the Rust bindings
+    # require the Rust standard library.
+    # Normally the Rust stdlib is brought in as a dependency by depending
+    # on any first-party Rust target. But in this case, it's conceivable
+    # that pure-C++ targets will not depend on any 1p Rust code so we'll add
+    # the Rust stdlib explicitly.
+    deps = [ ":cxx_rustdeps" ]
+
+    if (use_local_std_by_default) {
+      deps += [ "//build/rust/std:link_local_std" ]
+    } else {
+      assert(prebuilt_libstd_supported,
+             "Prebuilt Rust stdlib is not available for this target")
+      deps += [ "//build/rust/std:link_prebuilt_std" ]
+    }
+  }
+
+  # The required dependencies for cxx-generated bindings that must be included
+  # on the Rust side.
+  group("cxx_rustdeps") {
+    public_deps = [ "//third_party/rust/cxx/v1:lib" ]
+  }
+}
+
+# Enables code behind #[cfg(test)]. This should only be used for targets where
+# testonly=true.
+config("test") { + rustflags = [ + "--cfg", + "test", + ] +} + +# TODO(crbug.com/gn/104): GN rust_proc_macro targets are missing this +# command line flag, for the proc_macro crate which is provided by rustc for +# compiling proc-macros. +config("proc_macro_extern") { + rustflags = [ + "--extern", + "proc_macro", + ] +} + +# Forbids unsafe code in crates with this config. +config("forbid_unsafe") { + rustflags = [ "-Funsafe_code" ] +} diff --git a/build/rust/OWNERS b/build/rust/OWNERS new file mode 100644 index 000000000000..0e7aca6e184a --- /dev/null +++ b/build/rust/OWNERS @@ -0,0 +1,7 @@ +adetaylor@chromium.org +ajgo@chromium.org +collinbaker@chromium.org +danakj@chromium.org +lukasza@chromium.org +rsesek@chromium.org +thakis@chromium.org diff --git a/build/rust/analyze.gni b/build/rust/analyze.gni new file mode 100644 index 000000000000..36c06112d590 --- /dev/null +++ b/build/rust/analyze.gni @@ -0,0 +1,79 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/compute_inputs_for_analyze.gni") +import("//build/config/rust.gni") + +if (compute_inputs_for_analyze) { + template("analyze_rust") { + _target_name = target_name + assert(defined(invoker.crate_root)) + + action("${_target_name}_collect_sources") { + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ + "inputs", + "script", + "sources", + "depfile", + "outputs", + "args", + ]) + forward_variables_from(invoker, [ "testonly" ]) + + script = "//build/rust/collect_rust_sources.py" + depfile = "${target_gen_dir}/${target_name}.verify.d" + outputs = [ depfile ] + + args = [ + "--generate-depfile", + "${rust_sysroot}/bin/rustc", + rebase_path(crate_root, root_build_dir), + rebase_path(depfile, root_build_dir), + "{{rustflags}}", + ] + } + + action(_target_name) { + forward_variables_from(invoker, [ "testonly" ]) + + # Constructs a depfile of all rust sources in the crate. + deps = [ ":${_target_name}_collect_sources" ] + + # This target is reached once during `gn gen` and then again during + # `gn analyze`. + # + # 1. When doing `gn gen`, the ':${_target_name}_collect_sources' + # target generates a depfile containing all the rust sources of + # the crate. The exec_script() below runs first, and it produces an + # empty result. + # 2. When doing `gn analyze`, the exec_script() reads the depfile that + # was written during `gn gen` and puts each Rust file in the crate + # into `inputs`. + depfile_path = [] + foreach(d, get_target_outputs(":${_target_name}_collect_sources")) { + depfile_path += [ rebase_path(d, root_build_dir) ] + } + + # Here we read the depfile from `gn gen` when doing `gn analyze`, and + # add all the rust files in the crate to `inputs`. This ensures that + # analyze considers them as affecting tests that depend on the crate. 
+ rust_srcs = exec_script("//build/rust/collect_rust_sources.py", + [ "--read-depfile" ] + depfile_path, + "list lines") + inputs = [] + foreach(s, rust_srcs) { + inputs += [ rebase_path(s, "//", root_build_dir) ] + } + script = "//build/rust/collect_rust_sources.py" + args = [ + "--stamp", + rebase_path("${target_gen_dir}/${target_name}.verify.stamp", + root_build_dir), + ] + outputs = [ "${target_gen_dir}/${target_name}.verify.stamp" ] + } + } +} diff --git a/build/rust/cargo_crate.gni b/build/rust/cargo_crate.gni new file mode 100644 index 000000000000..0456a25cc289 --- /dev/null +++ b/build/rust/cargo_crate.gni @@ -0,0 +1,340 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/rust.gni") +import("//build/rust/rust_target.gni") + +# This template allows for building Cargo crates within gn. +# +# It is intended for use with pre-existing (third party) code and +# is none too efficient. (It will stall the build pipeline whilst +# it runs build scripts to work out what flags are needed). First +# party code should directly use first-class gn targets, such as +# //build/rust/rust_static_library.gni or similar. +# +# Because it's intended for third-party code, it automatically +# defaults to //build/config/compiler:no_chromium_code which +# suppresses some warnings. If you *do* use this for first party +# code, you should remove that config and add the equivalent +# //build/config/compiler:chromium_code config. +# +# Arguments: +# sources +# crate_root +# epoch +# deps +# aliased_deps +# features +# build_native_rust_unit_tests +# edition +# crate_name +# All just as in rust_static_library.gni +# library_configs/executable_configs +# All just as in rust_target.gni +# +# dev_deps +# Same meaning as test_deps in rust_static_library.gni, but called +# dev_deps to match Cargo.toml better. +# +# build_root (optional) +# Filename of build.rs build script. +# +# build_deps (optional) +# Build script dependencies +# +# build_sources (optional) +# List of sources for build script. Must be specified if +# build_root is specified. +# +# build_script_outputs (optional) +# List of .rs files generated by the build script, if any. +# Fine to leave undefined even if you have a build script. +# This doesn't directly correspond to any Cargo variable, +# but unfortunately is necessary for gn to build its dependency +# trees automatically. +# Many build scripts just output --cfg directives, in which case +# no source code is generated and this can remain empty. +# +# build_script_inputs (optional) +# If the build script reads any files generated by build_deps, +# as opposed to merely linking against them, add a list of such +# files here. Again, this doesn't correspond to a Cargo variable +# but is necessary for gn. +# +# crate_type "bin", "proc-macro" or "rlib" (optional) +# Whether to build an executable. The default is "rlib". +# At present others are not supported. +# +# cargo_pkg_authors +# cargo_pkg_version +# cargo_pkg_name +# cargo_pkg_description +# Strings as found within 'version' and similar fields within Cargo.toml. 
+# Converted to environment variables passed to rustc, in case the crate +# uses clap `crate_version!` or `crate_authors!` macros (fairly common in +# command line tool help) + +template("cargo_crate") { + _orig_target_name = target_name + + _crate_name = _orig_target_name + if (defined(invoker.crate_name)) { + _crate_name = invoker.crate_name + } + + # Executables need to have unique names. Work out a prefix. + if (defined(invoker.build_root)) { + _epochlabel = "vunknown" + if (defined(invoker.epoch)) { + _tempepoch = string_replace(invoker.epoch, ".", "_") + _epochlabel = "v${_tempepoch}" + } + + # This name includes the target name to ensure it's unique for each possible + # build target in the same BUILD.gn file. + _build_script_name = + "${_crate_name}_${target_name}_${_epochlabel}_build_script" + + # Where the OUT_DIR will point when running the build script exe, and + # compiling the crate library/binaries. This directory must include the + # target name to avoid collisions between multiple GN targets that exist + # in the same BUILD.gn. + _build_script_env_out_dir = "$target_gen_dir/$target_name" + } + + _rustenv = [] + if (defined(invoker.rustenv)) { + _rustenv = invoker.rustenv + } + if (defined(invoker.cargo_pkg_authors)) { + _rustenv += [ "CARGO_PKG_AUTHORS=${invoker.cargo_pkg_authors}" ] + } + if (defined(invoker.cargo_pkg_version)) { + _rustenv += [ "CARGO_PKG_VERSION=${invoker.cargo_pkg_version}" ] + } + if (defined(invoker.cargo_pkg_name)) { + _rustenv += [ "CARGO_PKG_NAME=${invoker.cargo_pkg_name}" ] + } + if (defined(invoker.cargo_pkg_description)) { + _rustenv += [ "CARGO_PKG_DESCRIPTION=${invoker.cargo_pkg_description}" ] + } + + # The main target, either a Rust source set or an executable. + rust_target(target_name) { + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ + "build_root", + "build_deps", + "build_sources", + "build_script_inputs", + "build_script_outputs", + "unit_test_target", + "target_type", + "configs", + "rustenv", + ]) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + # Work out what we're building. + crate_type = "rlib" + if (defined(invoker.crate_type)) { + crate_type = invoker.crate_type + } + + # TODO(crbug.com/1422745): don't default to true. This requires changes to + # third_party.toml and gnrt when generating third-party build targets. + allow_unsafe = true + + if (!defined(rustflags)) { + rustflags = [] + } + rustenv = _rustenv + if (crate_type == "bin") { + target_type = "executable" + assert(!defined(invoker.epoch)) + } else if (crate_type == "proc-macro") { + target_type = "rust_proc_macro" + } else { + assert(crate_type == "rlib") + target_type = "rust_library" + } + + if (!defined(build_native_rust_unit_tests)) { + build_native_rust_unit_tests = true + } + + # The unit tests for each target, if generated, should be unique as well. + # a) It needs to be unique even if multiple build targets have the same + # `crate_name`, but different target names. + # b) It needs to be unique even if multiple build targets have the same + # `crate_name` and target name, but different epochs. 
+ _unit_test_unique_target_name = "" + if (_crate_name != _orig_target_name) { + _unit_test_unique_target_name = "${_orig_target_name}_" + } + _unit_test_unique_epoch = "" + if (defined(invoker.epoch)) { + _epoch_str = string_replace(invoker.epoch, ".", "_") + _unit_test_unique_epoch = "v${_epoch_str}_" + } + if (defined(output_dir) && output_dir != "") { + unit_test_output_dir = output_dir + } + unit_test_target = "${_unit_test_unique_target_name}${_crate_name}_${_unit_test_unique_epoch}unittests" + + if ((!defined(output_dir) || output_dir == "") && crate_type == "rlib") { + # Cargo crate rlibs can be compiled differently for tests, and must not + # collide with the production outputs. This does *not* override the + # unit_test_output_dir, which is set above, as that target is not an rlib. + output_dir = "$target_out_dir/$_orig_target_name" + } + + if (defined(invoker.build_root)) { + # Uh-oh, we have a build script + if (!defined(deps)) { + deps = [] + } + if (!defined(sources)) { + sources = [] + } + if (defined(invoker.dev_deps)) { + test_deps = invoker.dev_deps + } + + # This... is a bit weird. We generate a file called cargo_flags.rs which + # does not actually contain Rust code, but instead some flags to add + # to the rustc command line. We need it to end in a .rs extension so that + # we can include it in the 'sources' line and thus have dependency + # calculation done correctly. data_deps won't work because targets don't + # require them to be present until runtime. + flags_file = "$_build_script_env_out_dir/cargo_flags.rs" + rustflags += [ "@" + rebase_path(flags_file, root_build_dir) ] + sources += [ flags_file ] + if (defined(invoker.build_script_outputs)) { + # Build scripts may output arbitrary files. They are usually included in + # the main Rust target using include! or include_str! and therefore the + # filename may be .rs or may be arbitrary. We want to educate ninja + # about the dependency either way. + foreach(extra_source, + filter_include(invoker.build_script_outputs, [ "*.rs" ])) { + sources += [ "$_build_script_env_out_dir/$extra_source" ] + } + inputs = [] + foreach(extra_source, + filter_exclude(invoker.build_script_outputs, [ "*.rs" ])) { + inputs += [ "$_build_script_env_out_dir/$extra_source" ] + } + } + deps += [ ":${_build_script_name}_output" ] + } + } + + if (defined(invoker.build_root)) { + # Extra targets required to make build script work + action("${_build_script_name}_output") { + script = rebase_path("//build/rust/run_build_script.py") + build_script_target = + ":${_build_script_name}($host_toolchain_no_sanitizers)" + deps = [ build_script_target ] + + # The build script output is always in the name-specific output dir. It + # may be built with a different toolchain when cross-compiling (the host + # toolchain) so we must find the path relative to that. 
+ _build_script_target_out_dir = + get_label_info(build_script_target, "target_out_dir") + _build_script_exe = + "$_build_script_target_out_dir/$_orig_target_name/$_build_script_name" + if (is_win) { + _build_script_exe = "${_build_script_exe}.exe" + } + + _flags_file = "$_build_script_env_out_dir/cargo_flags.rs" + + inputs = [ _build_script_exe ] + outputs = [ _flags_file ] + args = [ + "--build-script", + rebase_path(_build_script_exe, root_build_dir), + "--output", + rebase_path(_flags_file, root_build_dir), + "--rust-prefix", + rebase_path("${rust_sysroot}/bin"), + "--out-dir", + rebase_path(_build_script_env_out_dir, root_build_dir), + "--src-dir", + rebase_path(get_path_info(invoker.build_root, "dir"), root_build_dir), + ] + if (defined(rust_abi_target) && rust_abi_target != "") { + args += [ + "--target", + rust_abi_target, + ] + } + if (defined(invoker.features)) { + args += [ "--features" ] + args += invoker.features + } + if (defined(invoker.build_script_outputs)) { + args += [ "--generated-files" ] + args += invoker.build_script_outputs + foreach(generated_file, invoker.build_script_outputs) { + outputs += [ "$_build_script_env_out_dir/$generated_file" ] + } + } + if (_rustenv != []) { + args += [ "--env" ] + args += _rustenv + } + if (defined(invoker.build_script_inputs)) { + inputs += invoker.build_script_inputs + } + } + + if (current_toolchain == host_toolchain_no_sanitizers) { + rust_target(_build_script_name) { + target_type = "executable" + sources = invoker.build_sources + crate_root = invoker.build_root + if (defined(invoker.build_deps)) { + deps = invoker.build_deps + } + + # An rlib's build script may be built differently for tests and for + # production, so they must be in a name specific to the GN target. The + # ${_build_script_name}_output target looks for the exe in this + # location. + output_dir = "$target_out_dir/$_orig_target_name" + rustenv = _rustenv + forward_variables_from(invoker, + [ + "features", + "edition", + "rustflags", + ]) + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + } + } else { + not_needed(invoker, + [ + "build_sources", + "build_deps", + "build_root", + "build_script_inputs", + "build_script_outputs", + ]) + } + } else { + not_needed([ + "_name_specific_output_dir", + "_orig_target_name", + ]) + } +} + +set_defaults("cargo_crate") { + library_configs = default_compiler_configs + executable_configs = default_executable_configs +} diff --git a/build/rust/collect_rust_sources.py b/build/rust/collect_rust_sources.py new file mode 100755 index 000000000000..48f2f1f52335 --- /dev/null +++ b/build/rust/collect_rust_sources.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +'''Is used to find all rust files in a crate, and write the result to a +depfile. Then, used again to read the same depfile and pull out just the +source files. 
Lastly, it is also used to write a stamp file at the same
+location as the depfile.'''
+
+import argparse
+import re
+import subprocess
+import sys
+
+FILE_REGEX = re.compile('^(.*):')
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='Collect Rust sources for a crate')
+  parser.add_argument('--stamp',
+                      action='store_true',
+                      help='Generate a stamp file')
+  parser.add_argument('--generate-depfile',
+                      action='store_true',
+                      help='Generate a depfile')
+  parser.add_argument('--read-depfile',
+                      action='store_true',
+                      help='Read the previously generated depfile')
+  args, rest = parser.parse_known_args()
+
+  if (args.stamp):
+    stampfile = rest[0]
+    with open(stampfile, "w") as f:
+      f.write("stamp")
+  elif (args.generate_depfile):
+    rustc = rest[0]
+    crate_root = rest[1]
+    depfile = rest[2]
+    rustflags = rest[3:]
+
+    rustc_args = [
+        "--emit=dep-info=" + depfile, "-Zdep-info-omit-d-target", crate_root
+    ]
+    subprocess.check_call([rustc] + rustc_args + rustflags)
+  elif (args.read_depfile):
+    depfile = rest[0]
+    try:
+      with open(depfile, "r") as f:
+        matches = [FILE_REGEX.match(l) for l in f.readlines()]
+      for m in matches:
+        if m:
+          print(m.group(1))
+    except:
+      # The depfile may not exist yet (e.g. on a clean `gn gen`).
+      pass
+  else:
+    print("ERROR: Unknown action")
+    parser.print_help()
+    return 1
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/rust/filter_clang_args.py b/build/rust/filter_clang_args.py
new file mode 100644
index 000000000000..5a1843c0df07
--- /dev/null
+++ b/build/rust/filter_clang_args.py
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Filters clang args to make them suitable for libclang.
+
+Rust involves several libclang-based tools that parse C++.
+We pass such tools our complete {{cflags}}, but a few of the
+arguments aren't appropriate for libclang (for example those
+which load plugins).
+
+This function filters them out.
+"""
+
+
+def filter_clang_args(clangargs):
+  def do_filter(args):
+    i = 0
+    while i < len(args):
+      # Intercept plugin arguments
+      if args[i] == '-Xclang':
+        i += 1
+        if args[i] == '-add-plugin':
+          pass
+        elif args[i].startswith('-plugin-arg'):
+          i += 2
+      else:
+        yield args[i]
+      i += 1
+
+  return list(do_filter(clangargs))
diff --git a/build/rust/rs_bindings_from_cc.gni b/build/rust/rs_bindings_from_cc.gni
new file mode 100644
index 000000000000..9bd08cff86ab
--- /dev/null
+++ b/build/rust/rs_bindings_from_cc.gni
@@ -0,0 +1,297 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/rust.gni")
+import("//build/config/sysroot.gni")
+import("//build/rust/mixed_static_library.gni")
+
+# Template to generate and build Rust bindings for a set of C++ headers using
+# Crubit's `rs_bindings_from_cc` tool.
+#
+# This template expands to a `mixed_static_library` named "<target>_rs_api" and
+# containing the Rust side of the bindings (as well as internal C++ thunks
+# needed to support the bindings).
+#
+# The generated out/.../gen/.../<target>_rs_api.rs is machine-generated, but
+# should be fairly readable (inspecting it might be useful to discover the
+# imported bindings and their shape).
+#
+# Parameters:
+#
+# bindings_target:
+#   The C++ target (e.g. a `source_set`) that Rust bindings should be
+#   generated for.
+#
+# public_headers:
+#   The .h files to generate bindings for.
+#
+#     Implementation note: This doesn't just take *all* the headers of the
+#     `bindings_target`, because typically only a *subset* of headers provides
+#     the *public* API that bindings are needed for.
+#
+#     TODO(crbug.com/1329611): Internal headers should still be included in
+#     the targets_and_headers metadata...
+#
+#   deps:
+#     Other `rs_bindings_from_cc` targets that the bindings need to depend on
+#     (e.g. because APIs in the `public_headers` refer to `struct`s declared
+#     in those other targets. Note how in the usage example below bindings for
+#     `struct Goat` are provided by `goat_rs_api`, and that therefore the
+#     bindings for `TeleportGoat` provided by `teleport_rs_api` depend on
+#     `goat_rs_api`).
+#
+#     Oftentimes `deps` can be a copy of the `public_deps` of the
+#     `bindings_target`, but depending on targets with the suffix "_rs_api".
+#     Still, there are scenarios where `deps` don't parallel *all* entries
+#     from `public_deps`:
+#     * `public_deps` that don't expose Rust APIs (i.e. there are no
+#       "..._rs_api" targets to depend on).
+#     * `public_deps` that Crubit bindings don't depend on (dependencies that
+#       don't provide re-exportable C++ APIs, or that only provide items
+#       that are ignored by Crubit - e.g. `#define`s).
+#
+# Usage example:
+#
+#   BUILD.gn:
+#     import("//build/rust/rs_bindings_from_cc.gni")
+#     import("//build/rust/rust_executable.gni")
+#
+#     rust_executable("my_target") {
+#       crate_root = "main.rs"
+#       sources = [ "main.rs" ]
+#       deps = [ ":teleport_rs_api" ]
+#     }
+#
+#     # This will generate a "teleport_rs_api" target that provides Rust
+#     # bindings for the "teleport.h" header from the ":teleport" source
+#     # set.
+#     rs_bindings_from_cc("teleport_rs_api") {
+#       bindings_target = ":teleport"
+#       public_headers = ["teleport.h"]
+#       deps = [ ":goat_rs_api" ]  # Parallels `public_deps` of ":teleport".
+#     }
+#
+#     source_set("teleport") {
+#       sources = [ "teleport.h", ... ]
+#       public_deps = [ ":goat" ]
+#     }
+#
+#     rs_bindings_from_cc("goat_rs_api") {
+#       bindings_target = ":goat"
+#       public_headers = ["goat.h"]
+#     }
+#     source_set("goat") {
+#       sources = [ "goat.h", ... ]
+#     }
+#
+#   teleport.h:
+#     #include "goat.h"
+#     void TeleportGoat(const Goat& goat_to_teleport);
+#
+#   goat.h:
+#     struct Goat { ... };
+#
+#   main.rs:
+#     fn main() {
+#       let g: goat_rs_api::Goat = ...;
+#       teleport_rs_api::TeleportGoat(&g);
+#     }
+#
+# Debugging and implementation notes:
+#
+# - Consider running the build while the CRUBIT_DEBUG environment variable is
+#   set. This will generate an additional `.ir` file and log extra information
+#   from the `run_rs_bindings_from_cc.py` script (e.g. full cmdlines for
+#   invoking `rs_bindings_from_cc`).
+#
+template("rs_bindings_from_cc") {
+  # Mandatory parameter: bindings_target.
+  assert(defined(invoker.bindings_target),
+         "Must specify the C++ target to make bindings for.")
+  _bindings_target = invoker.bindings_target
+
+  # Mandatory/unavoidable parameter: target_name
+  _lib_target_name = target_name
+  _base_target_name = get_label_info(_bindings_target, "name")
+  assert(_lib_target_name == "${_base_target_name}_rs_api",
+         "The convention is that bindings for `foo` are named `foo_rs_api`")
+
+  # Mandatory parameter: public_headers.
+  assert(defined(invoker.public_headers),
+         "Must specify the public C++ headers to make bindings for.")
+  _rebased_public_headers = []
+  foreach(hdr, invoker.public_headers) {
+    _rebased_public_headers += [ rebase_path(hdr) ]
+  }
+
+  # Optional parameter: testonly.
+  _testonly = false
+  if (defined(invoker.testonly)) {
+    _testonly = invoker.testonly
+  }
+
+  # Optional parameter: visibility.
+  if (defined(invoker.visibility)) {
+    _visibility = invoker.visibility
+  }
+
+  # Optional parameter: deps.
+  #
+  # TODO(crbug.com/1329611): Can we somehow assert that `_deps` only contains
+  # some "..._rs_api" targets created via
+  # `mixed_static_library($_lib_target_name)` below? foreach(dep, _deps) {
+  # assert something }
+  _deps = []
+  if (defined(invoker.deps)) {
+    _deps = invoker.deps
+  }
+
+  # Various names and paths that are shared across multiple targets defined
+  # in the template here.
+  _gen_bindings_target_name = "${_lib_target_name}_gen_bindings"
+  _gen_metadata_target_name = "${_lib_target_name}_gen_metadata"
+  _metadata_target_name = "${_lib_target_name}_metadata"
+  _metadata_path = "${target_gen_dir}/${_lib_target_name}_meta.json"
+  _rs_out_path = "${target_gen_dir}/${_lib_target_name}.rs"
+  _cc_out_path = "${target_gen_dir}/${_lib_target_name}_impl.cc"
+
+  # Calculating the --targets_and_headers snippet for the *current* target
+  # and putting it into GN's `metadata`.
+  group(_metadata_target_name) {
+    testonly = _testonly
+    visibility = [
+      ":${_gen_metadata_target_name}",
+      ":${_lib_target_name}",
+    ]
+    deps = []
+
+    metadata = {
+      # The data below corresponds to a single-target entry inside the
+      # `--targets_and_headers` cmdline argument of `rs_bindings_from_cc`.
+      crubit_target_and_headers = [
+        {
+          # The `get_label_info` call below expands ":foo_rs_api" into
+          # something like "//dir/bar/baz:foo_rs_api". Crubit assumes that
+          # there is a colon + uses the after-colon-suffix as the name of the
+          # crate.
+          t = get_label_info(":${_lib_target_name}", "label_no_toolchain")
+          h = _rebased_public_headers
+        },
+      ]
+    }
+  }
+
+  # Gathering --targets_and_headers data from *all* transitive dependencies
+  # and putting them into the file at `_metadata_path`.
+  generated_file(_gen_metadata_target_name) {
+    testonly = _testonly
+    visibility = [ ":${_gen_bindings_target_name}" ]
+
+    deps = [ ":${_metadata_target_name}" ]
+    deps += _deps
+
+    outputs = [ _metadata_path ]
+    output_conversion = "json"
+    data_keys = [ "crubit_target_and_headers" ]
+
+    # `walk_keys` are used to limit how deep the transitive dependency walk
+    # goes. This is important, because Crubit doesn't care about all the
+    # `deps` or `public_deps` of the `_bindings_target`. (See also the doc
+    # comment about the `rs_bindings_from_cc.deps` parameter at the top of
+    # this file.)
+    walk_keys = [ "crubit_metadata_deps" ]
+  }
+
+  # Exposing the generated Rust bindings.
+  mixed_static_library(_lib_target_name) {
+    testonly = _testonly
+    if (defined(_visibility)) {
+      visibility = _visibility
+    }
+
+    sources = [ _cc_out_path ]
+    deps = _deps
+    deps += [
+      ":${_gen_bindings_target_name}",
+      ":${_metadata_target_name}",
+      "//third_party/crubit:deps_of_rs_api_impl",
+      _bindings_target,
+    ]
+
+    # Chromium already covers `chromium/src/` and `out/Release/gen` in the
+    # include path, but we need to explicitly add `out/Release` below. This
+    # is needed, because `--public_headers` passed to Crubit use paths
+    # relative to the `out/Release` directory. See also b/239238801.
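+    # (Illustrative example, not taken from a real target: a header listed
+    # as "foo/bar.h" reaches Crubit as "../../foo/bar.h", which resolves
+    # only when `root_build_dir`, e.g. `out/Release`, is on the include
+    # path.)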
+ include_dirs = [ root_build_dir ] + + rs_sources = [ _rs_out_path ] + rs_crate_name = _lib_target_name + rs_crate_root = _rs_out_path + rs_deps = _deps + rs_deps += [ + ":${_gen_bindings_target_name}", + "//third_party/crubit:deps_of_rs_api", + ] + + metadata = { + crubit_metadata_deps = _deps + [ ":${_metadata_target_name}" ] + } + } + + # Invoking Crubit's `rs_bindings_from_cc` tool to generate Rust bindings. + action(_gen_bindings_target_name) { + testonly = _testonly + if (defined(_visibility)) { + visibility = _visibility + } + + script = "//build/rust/run_rs_bindings_from_cc.py" + inputs = [ "//third_party/rust-toolchain/bin/rs_bindings_from_cc" ] + sources = invoker.public_headers + outputs = [ + _rs_out_path, + _cc_out_path, + ] + + deps = [ ":${_gen_metadata_target_name}" ] + args = [ + # Target-specific outputs: + "--rs_out", + rebase_path(_rs_out_path), + "--cc_out", + rebase_path(_cc_out_path), + + # Target-specific inputs: + "--public_headers", + string_join(",", _rebased_public_headers), + "--targets_and_headers_from_gn", + rebase_path(_metadata_path), + ] + + # Several important compiler flags come from default_compiler_configs + configs = default_compiler_configs + if (defined(invoker.configs)) { + configs += invoker.configs + } + args += [ + "--", + "{{defines}}", + "{{include_dirs}}", + "{{cflags}}", + + # This path contains important C headers (e.g. stddef.h) and {{cflags}} + # does not include it. Normally this path is implicitly added by clang but + # it does not happen for libclang. + # + # Add it last so includes from deps and configs take precedence. + "-isystem" + rebase_path( + clang_base_path + "/lib/clang/" + clang_version + "/include", + root_build_dir), + + # Passes C comments through as rustdoc attributes. + "-fparse-all-comments", + ] + } +} diff --git a/build/rust/run_bindgen.py b/build/rust/run_bindgen.py new file mode 100755 index 000000000000..a77c555113d9 --- /dev/null +++ b/build/rust/run_bindgen.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python3 + +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import subprocess +import sys + +# Set up path to be able to import action_helpers. 
+sys.path.append(
+    os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir,
+                 os.pardir, 'build'))
+import action_helpers
+
+from filter_clang_args import filter_clang_args
+
+
+def atomic_copy(in_path, out_path):
+  # `src` avoids shadowing the `input` builtin.
+  with open(in_path, 'rb') as src:
+    with action_helpers.atomic_output(out_path) as output:
+      content = src.read()
+      output.write(content)
+
+
+def copy_to_prefixed_filename(path, filename, prefix):
+  atomic_copy(os.path.join(path, filename),
+              os.path.join(path, prefix + "_" + filename))
+
+
+def main():
+  parser = argparse.ArgumentParser("run_bindgen.py")
+  parser.add_argument("--exe", help="Path to bindgen", required=True)
+  parser.add_argument("--header",
+                      help="C header file to generate bindings for",
+                      required=True)
+  parser.add_argument("--depfile",
+                      help="depfile to output with header dependencies")
+  parser.add_argument("--output", help="output .rs bindings", required=True)
+  parser.add_argument("--ld-library-path",
                      help="LD_LIBRARY_PATH (or DYLD_LIBRARY_PATH on Mac) to "
                      "set")
+  parser.add_argument("-I", "--include", help="include path", action="append")
+  parser.add_argument("--bindgen-flags",
+                      help="flags to pass to bindgen",
+                      nargs="*")
+  parser.add_argument(
+      "clangargs",
+      metavar="CLANGARGS",
+      help="arguments to pass to libclang (see "
+      "https://docs.rs/bindgen/latest/bindgen/struct.Builder.html#method.clang_args)",
+      nargs="*")
+  args = parser.parse_args()
+
+  # Args passed to the actual bindgen cli
+  genargs = []
+  genargs.append('--no-layout-tests')
+  if args.bindgen_flags is not None:
+    for flag in args.bindgen_flags:
+      genargs.append("--" + flag)
+
+  # TODO(danakj): We need to point bindgen to
+  # //third_party/rust-toolchain/bin/rustfmt.
+  genargs.append('--no-rustfmt-bindings')
+  genargs += ['--rust-target', 'nightly']
+
+  if args.depfile:
+    genargs.append('--depfile')
+    genargs.append(args.depfile)
+  genargs.append('--output')
+  genargs.append(args.output)
+  genargs.append(args.header)
+  genargs.append('--')
+  genargs.extend(filter_clang_args(args.clangargs))
+  # Copy the environment so we don't mutate this process's own os.environ.
+  env = os.environ.copy()
+  if args.ld_library_path:
+    if sys.platform == 'darwin':
+      env["DYLD_LIBRARY_PATH"] = args.ld_library_path
+    else:
+      env["LD_LIBRARY_PATH"] = args.ld_library_path
+  returncode = subprocess.run([args.exe, *genargs], env=env).returncode
+  if returncode != 0:
+    # Make sure we don't emit anything if bindgen failed.
+    try:
+      os.remove(args.output)
+    except FileNotFoundError:
+      pass
+    if args.depfile:
+      try:
+        os.remove(args.depfile)
+      except FileNotFoundError:
+        pass
+  return returncode
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/rust/run_build_script.py b/build/rust/run_build_script.py
new file mode 100755
index 000000000000..0db5cb56fa2c
--- /dev/null
+++ b/build/rust/run_build_script.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env vpython3
+
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a wrapper script which runs a Cargo build.rs build script
+# executable in a Cargo-like environment. Build scripts can do arbitrary
+# things and we can't support everything. Moreover, we do not WANT
+# to support everything because that means the build is not deterministic.
+# Code review processes must be applied to ensure that the build script +# depends upon only these inputs: +# +# * The environment variables set by Cargo here: +# https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts +# * Output from rustc commands, e.g. to figure out the Rust version. +# +# Similarly, the only allowable output from such a build script +# is currently: +# +# * Generated .rs files +# * cargo:rustc-cfg output. +# +# That's it. We don't even support the other standard cargo:rustc- +# output messages. + +import argparse +import io +import os +import platform +import re +import subprocess +import sys +import tempfile + +# Set up path to be able to import action_helpers +sys.path.append( + os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, + os.pardir, 'build')) +import action_helpers + + +RUSTC_VERSION_LINE = re.compile(r"(\w+): (.*)") + + +def rustc_name(): + if platform.system() == 'Windows': + return "rustc.exe" + else: + return "rustc" + + +def host_triple(rustc_path): + """ Works out the host rustc target. """ + args = [rustc_path, "-vV"] + known_vars = dict() + proc = subprocess.Popen(args, stdout=subprocess.PIPE) + for line in io.TextIOWrapper(proc.stdout, encoding="utf-8"): + m = RUSTC_VERSION_LINE.match(line.rstrip()) + if m: + known_vars[m.group(1)] = m.group(2) + return known_vars["host"] + + +RUSTC_CFG_LINE = re.compile("cargo:rustc-cfg=(.*)") + + +def main(): + parser = argparse.ArgumentParser(description='Run Rust build script.') + parser.add_argument('--build-script', + required=True, + help='build script to run') + parser.add_argument('--output', + required=True, + help='where to write output rustc flags') + parser.add_argument('--target', help='rust target triple') + parser.add_argument('--features', help='features', nargs='+') + parser.add_argument('--env', help='environment variable', nargs='+') + parser.add_argument('--rust-prefix', required=True, help='rust path prefix') + parser.add_argument('--generated-files', nargs='+', help='any generated file') + parser.add_argument('--out-dir', required=True, help='target out dir') + parser.add_argument('--src-dir', required=True, help='target source dir') + + args = parser.parse_args() + + rustc_path = os.path.join(args.rust_prefix, rustc_name()) + + # We give the build script an OUT_DIR of a temporary directory, + # and copy out only any files which gn directives say that it + # should generate. Mostly this is to ensure we can atomically + # create those files, but it also serves to avoid side-effects + # from the build script. + # In the future, we could consider isolating this build script + # into a chroot jail or similar on some platforms, but ultimately + # we are always going to be reliant on code review to ensure the + # build script is deterministic and trustworthy, so this would + # really just be a backup to humans. 
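+  # As an illustration (hypothetical, not a real Chromium build script): a
+  # build.rs which prints "cargo:rustc-cfg=my_feature" results in a flags
+  # file containing "--cfg\nmy_feature\n" (see the RUSTC_CFG_LINE handling
+  # below); other cargo: output lines are simply ignored here.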
+  with tempfile.TemporaryDirectory() as tempdir:
+    env = {}  # try to avoid build scripts depending on other things
+    env["RUSTC"] = os.path.abspath(rustc_path)
+    env["OUT_DIR"] = tempdir
+    env["CARGO_MANIFEST_DIR"] = os.path.abspath(args.src_dir)
+    env["HOST"] = host_triple(rustc_path)
+    if args.target is None:
+      env["TARGET"] = env["HOST"]
+    else:
+      env["TARGET"] = args.target
+    target_components = env["TARGET"].split("-")
+    env["CARGO_CFG_TARGET_ARCH"] = target_components[0]
+    if args.features:
+      for f in args.features:
+        feature_name = f.upper().replace("-", "_")
+        env["CARGO_FEATURE_%s" % feature_name] = "1"
+    if args.env:
+      for e in args.env:
+        # Split on the first "=" only, in case the value itself contains "=".
+        (k, v) = e.split("=", 1)
+        env[k] = v
+    # Pass through a couple which are useful for diagnostics
+    if os.environ.get("RUST_BACKTRACE"):
+      env["RUST_BACKTRACE"] = os.environ.get("RUST_BACKTRACE")
+    if os.environ.get("RUST_LOG"):
+      env["RUST_LOG"] = os.environ.get("RUST_LOG")
+
+    # In the future we should set all the variables listed here:
+    # https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts
+
+    proc = subprocess.run([os.path.abspath(args.build_script)],
+                          env=env,
+                          cwd=args.src_dir,
+                          encoding='utf8',
+                          capture_output=True)
+
+    if proc.stderr.rstrip():
+      print(proc.stderr.rstrip(), file=sys.stderr)
+    proc.check_returncode()
+
+    flags = ""
+    for line in proc.stdout.split("\n"):
+      m = RUSTC_CFG_LINE.match(line.rstrip())
+      if m:
+        flags = "%s--cfg\n%s\n" % (flags, m.group(1))
+
+    # atomic_output will ensure we only write to the file on disk if what we
+    # give to write() is different than what's currently on disk.
+    with action_helpers.atomic_output(args.output) as output:
+      output.write(flags.encode("utf-8"))
+
+    # Copy any generated code out of the temporary directory,
+    # atomically.
+    if args.generated_files:
+      for generated_file in args.generated_files:
+        in_path = os.path.join(tempdir, generated_file)
+        out_path = os.path.join(args.out_dir, generated_file)
+        out_dir = os.path.dirname(out_path)
+        if not os.path.exists(out_dir):
+          os.makedirs(out_dir)
+        # `src` avoids shadowing the `input` builtin.
+        with open(in_path, 'rb') as src:
+          with action_helpers.atomic_output(out_path) as output:
+            content = src.read()
+            output.write(content)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/rust/run_rs_bindings_from_cc.py b/build/rust/run_rs_bindings_from_cc.py
new file mode 100755
index 000000000000..0b6ed4aa8f11
--- /dev/null
+++ b/build/rust/run_rs_bindings_from_cc.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
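+
+# Wrapper used by the GN template in //build/rust/rs_bindings_from_cc.gni to
+# invoke Crubit's `rs_bindings_from_cc` tool: it turns the GN-supplied
+# arguments into Crubit cmdline flags (written to a --flagfile params file)
+# plus filtered clang args.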
+
+import argparse
+import json
+import os
+import subprocess
+import sys
+
+THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+CHROMIUM_SRC_DIR = os.path.relpath(os.path.join(THIS_DIR, os.pardir, os.pardir))
+sys.path.append(THIS_DIR)
+from run_bindgen import filter_clang_args
+
+RUST_TOOLCHAIN_DIR = os.path.join(CHROMIUM_SRC_DIR, "third_party",
+                                  "rust-toolchain")
+RUSTFMT_EXE_PATH = os.path.join(RUST_TOOLCHAIN_DIR, "bin", "rustfmt")
+RUSTFMT_CONFIG_PATH = os.path.join(CHROMIUM_SRC_DIR, ".rustfmt.toml")
+RS_BINDINGS_FROM_CC_EXE_PATH = os.path.join(RUST_TOOLCHAIN_DIR, "bin",
+                                            "rs_bindings_from_cc")
+
+
+def format_cmdline(args):
+  def quote_arg(x):
+    if ' ' not in x: return x
+    x = x.replace('"', '\\"')
+    return f"\"{x}\""
+
+  return " ".join([quote_arg(x) for x in args])
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument("--targets_and_headers_from_gn",
+                      metavar="FILE",
+                      help="File parsed into --targets_and_headers Crubit arg",
+                      required=True)
+  parser.add_argument("--public_headers",
+                      metavar="FILE",
+                      help="Passed through to Crubit",
+                      required=True)
+  parser.add_argument("--rs_out",
+                      metavar="FILE",
+                      help="Passed through to Crubit",
+                      required=True)
+  parser.add_argument("--cc_out",
+                      metavar="FILE",
+                      help="Passed through to Crubit",
+                      required=True)
+  parser.add_argument("clang_args",
+                      metavar="CLANGARGS",
+                      help="Arguments to forward to clang libraries",
+                      nargs=argparse.REMAINDER)
+  args = parser.parse_args()
+
+  # Output paths
+  generator_args = []
+  generator_args.append("--rs_out={0}".format(os.path.relpath(args.rs_out)))
+  generator_args.append("--cc_out={0}".format(os.path.relpath(args.cc_out)))
+  if "CRUBIT_DEBUG" in os.environ:
+    generator_args.append("--ir_out={0}".format(
+        os.path.relpath(args.rs_out).replace(".rs", ".ir")))
+
+  # Public headers.
+  generator_args.append("--public_headers={0}".format(",".join(
+      [os.path.relpath(hdr) for hdr in args.public_headers.split(",")])))
+
+  # Targets to headers map.
+  with open(args.targets_and_headers_from_gn, "r") as f:
+    targets_and_headers = json.load(f)
+    for entry in targets_and_headers:
+      hdrs = entry["h"]
+      for i in range(len(hdrs)):
+        hdrs[i] = os.path.relpath(hdrs[i])
+  generator_args.append("--targets_and_headers={0}".format(
+      json.dumps(targets_and_headers)))
+
+  # All Crubit invocations in Chromium share the following cmdline args.
+  generator_args.append(f"--rustfmt_exe_path={RUSTFMT_EXE_PATH}")
+  generator_args.append(f"--rustfmt_config_path={RUSTFMT_CONFIG_PATH}")
+  generator_args.append(
+      "--crubit_support_path=third_party/crubit/src/rs_bindings_from_cc/support"
+  )
+
+  # Long cmdlines may not work - work around that by using Abseil's
+  # `--flagfile`:
+  # https://abseil.io/docs/python/guides/flags#a-note-about---flagfile
+  #
+  # Note that `clang_args` are not written to the flag file, because Abseil's
+  # flag parsing code is only aware of `ABSL_FLAG`-declared flags and doesn't
+  # know about Clang args (e.g. `-W...` or `-I...`).
+  params_file_path = os.path.relpath(args.rs_out).replace(".rs", ".params")
+  with open(params_file_path, "w") as f:
+    for line in generator_args:
+      print(line, file=f)
+
+  # Clang arguments.
+ # + # The call to `filter_clang_args` is needed to avoid the following error: + # error: unable to find plugin 'find-bad-constructs' + clang_args = [] + clang_args.extend(filter_clang_args(args.clang_args)) + # TODO(crbug.com/1329611): This warning needs to be suppressed, because + # otherwise Crubit/Clang complains as follows: + # error: .../third_party/rust-toolchain/bin/rs_bindings_from_cc: + # 'linker' input unused [-Werror,-Wunused-command-line-argument] + # Maybe `build/rust/rs_bindings_from_cc.gni` gives too much in `args`? But + # then `{{cflags}}` seems perfectly reasonable... + clang_args += ["-Wno-unused-command-line-argument"] + + # Print a copy&pastable final cmdline when asked for debugging help. + cmdline = [RS_BINDINGS_FROM_CC_EXE_PATH, f"--flagfile={params_file_path}"] + cmdline.extend(clang_args) + if "CRUBIT_DEBUG" in os.environ: + pretty_cmdline = format_cmdline(cmdline) + print(f"CRUBIT_DEBUG: CMDLINE: {pretty_cmdline}", file=sys.stderr) + + # TODO(crbug.com/1329611): run_bindgen.py removes the outputs when the tool + # fails. Maybe we need to do something similar here? OTOH in most failure + # modes Crubit will fail *before* generating its outputs... + return subprocess.run(cmdline).returncode + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/rust/rust_bindgen.gni b/build/rust/rust_bindgen.gni new file mode 100644 index 000000000000..9d72169ba16b --- /dev/null +++ b/build/rust/rust_bindgen.gni @@ -0,0 +1,193 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/clang/clang.gni") +import("//build/config/rust.gni") +import("//build/config/sysroot.gni") +import("//build/rust/rust_static_library.gni") + +if (is_win) { + import("//build/toolchain/win/win_toolchain_data.gni") +} + +_rustc_base_path = rust_sysroot + +# TODO(danakj): When we're using the Android prebuilt toolchain, there's no +# bindgen present. bindgen is for the host platform so using the linux one will +# work. +if (!use_chromium_rust_toolchain) { + _rustc_base_path = "//third_party/rust-toolchain" +} + +_bindgen_path = "${_rustc_base_path}/bin/bindgen" +if (is_win) { + _bindgen_path = "${_bindgen_path}.exe" +} + +# Template to build Rust/C bindings with bindgen. +# +# This template expands to a static_library containing the Rust side of the +# bindings. Simply treat it as a public dependency. +# +# Parameters: +# +# header: +# The .h file to generate bindings for. +# +# deps: (optional) +# C targets on which the headers depend in order to build successfully. +# +# configs: (optional) +# C compilation targets determine the correct list of -D and -I flags based +# on their dependencies and any configs applied. The same applies here. Set +# any configs here as if this were a C target. +# +# bindgen_flags: (optional) +# the additional bindgen flags which are passed to the executable +# +# Rust targets depending on the output must include! the generated file. +# +template("rust_bindgen") { + assert(defined(invoker.header), + "Must specify the C header file to make bindings for.") + action(target_name) { + # bindgen relies on knowing the {{defines}} and {{include_dirs}} required + # to build the C++ headers which it's parsing. These are passed to the + # script's args and are populated using deps and configs. 
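+    # For example, a dependency whose config adds defines = [ "FOO=1" ] and
+    # include_dirs = [ "//widget" ] would (in this hypothetical illustration)
+    # expand below to "-DFOO=1" and "-I../../widget" respectively.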
+ forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "deps", + "configs", + ]) + + sources = [ invoker.header ] + + if (!defined(configs)) { + configs = [] + } + + # Several important compiler flags come from default_compiler_configs + configs += default_compiler_configs + + output_dir = "$target_gen_dir" + out_gen_rs = "$output_dir/${target_name}.rs" + + script = rebase_path("//build/rust/run_bindgen.py") + inputs = [ _bindgen_path ] + + depfile = "$target_out_dir/${target_name}.d" + outputs = [ out_gen_rs ] + + lib_path = "" + if (is_linux) { + # Linux clang, and clang libs, use a shared libstdc++, which we must + # point to. + clang_ld_path = rebase_path(clang_base_path + "/lib", root_build_dir) + lib_path += "${clang_ld_path}:" + } + rust_ld_path = rebase_path(_rustc_base_path + "/lib", root_build_dir) + lib_path += "${rust_ld_path}" + + args = [ + "--exe", + rebase_path(_bindgen_path), + "--header", + rebase_path(invoker.header, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + "--output", + rebase_path(out_gen_rs, root_build_dir), + "--ld-library-path", + lib_path, + ] + + if (defined(invoker.bindgen_flags)) { + args += [ "--bindgen-flags" ] + foreach(flag, invoker.bindgen_flags) { + args += [ flag ] + } + } + + args += [ + "--", + "{{defines}}", + "{{include_dirs}}", + "{{cflags}}", + "{{cflags_c}}", + ] + + # Clang ships with some headers, which are installed along side the binary, + # and which clang itself finds by default, but libclang does not (see also + # https://reviews.llvm.org/D95396 which would resolve this but was reverted). + clang_headers = rebase_path( + clang_base_path + "/lib/clang/" + clang_version + "/include", + root_build_dir) + if (is_win) { + args += [ "-imsvc" + clang_headers ] + } else { + args += [ "-isystem" + clang_headers ] + } + + if (is_win) { + # On Windows we fall back to using system headers from a sysroot from + # depot_tools. This is negotiated by python scripts and the result is + # available in //build/toolchain/win/win_toolchain_data.gni. From there + # we get the `include_flags_imsvc` which point to the system headers. + if (host_cpu == "x86") { + win_toolchain_data = win_toolchain_data_x86 + } else if (host_cpu == "x64") { + win_toolchain_data = win_toolchain_data_x64 + } else if (host_cpu == "arm64") { + win_toolchain_data = win_toolchain_data_arm64 + } else { + error("Unsupported host_cpu, add it to win_toolchain_data.gni") + } + args += [ "${win_toolchain_data.include_flags_imsvc}" ] + } + + # Passes C comments through as rustdoc attributes. + if (is_win) { + args += [ "/clang:-fparse-all-comments" ] + } else { + args += [ "-fparse-all-comments" ] + } + + # Default configs include "-fvisibility=hidden", and for some reason this + # causes bindgen not to emit function bindings. Override it. + if (!is_win) { + args += [ "-fvisibility=default" ] + } + + if (is_win) { + # We pass MSVC style flags to clang on Windows, and libclang needs to be + # told explicitly to accept them. + args += [ "--driver-mode=cl" ] + + # On Windows, libclang adds arguments that it then fails to understand. + # -fno-spell-checking + # -fallow-editor-placeholders + # These should not cause bindgen to fail. + args += [ "-Wno-unknown-argument" ] + + # Replace these two arguments with a version that clang-cl can parse. 
+      args += [
+        "/clang:-fno-spell-checking",
+        "/clang:-fallow-editor-placeholders",
+      ]
+    }
+
+    if (!is_cfi) {
+      # LLVM searches for a default CFI ignorelist at (exactly)
+      # $(cwd)/lib/clang/$(llvm_version)/share/cfi_ignorelist.txt
+      # Even if we provide a custom -fsanitize-ignorelist, the absence
+      # of this default file will cause a fatal error. clang finds
+      # it within third_party/llvm-build, but for bindgen our cwd
+      # is the $out_dir. We _could_ create this file at the right
+      # location within the outdir using a "copy" target, but as
+      # we don't actually generate code within bindgen, the easier
+      # option is to tell bindgen to ignore all CFI ignorelists.
+      args += [ "-fno-sanitize-ignorelist" ]
+    }
+  }
+}
diff --git a/build/rust/rust_executable.gni b/build/rust/rust_executable.gni
new file mode 100644
index 000000000000..ea22aa3b5bd0
--- /dev/null
+++ b/build/rust/rust_executable.gni
@@ -0,0 +1,70 @@
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/rust/rust_target.gni")
+
+# Defines a Rust executable.
+#
+# This is identical to the built-in gn intrinsic 'executable' but
+# supports some additional parameters, as below:
+#
+#   edition (optional)
+#     Edition of the Rust language to be used.
+#     Options are "2015", "2018" and "2021". Defaults to "2021".
+#
+#   test_deps (optional)
+#     List of GN targets on which this crate's tests depend, in addition
+#     to deps.
+#
+#   build_native_rust_unit_tests (optional)
+#     Builds native unit tests (under #[cfg(test)]) written inside the Rust
+#     crate. This will create a `<name>_unittests` executable in the output
+#     directory when set to true.
+#     Chromium code should not set this, and instead prefer to split the code
+#     into a library and write gtests against it. See how to do that in
+#     //testing/rust_gtest_interop/README.md.
+#
+#   unit_test_target (optional)
+#     Overrides the default name for the unit tests target
+#
+#   features (optional)
+#     A list of conditional compilation flags to enable. This can be used
+#     to set features for crates built in-tree which are also published to
+#     crates.io. Each feature in the list will be passed to rustc as
+#     '--cfg feature=XXX'
+#
+#   inputs (optional)
+#     Additional input files needed for compilation (such as `include!`ed
+#     files)
+#
+#   test_inputs (optional)
+#     Same as above but for the unit tests target
+#
+# Example of usage:
+#
+#   rust_executable("foo_bar") {
+#     deps = [
+#       "//boo/public/rust/bar",
+#     ]
+#     sources = [ "src/main.rs" ]
+#   }
+#
+# This template is intended to serve the same purpose as 'rustc_library'
+# in Fuchsia.
+template("rust_executable") {
+  exclude_forwards = TESTONLY_AND_VISIBILITY + [ "configs" ]
+  rust_target(target_name) {
+    forward_variables_from(invoker, "*", exclude_forwards)
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    if (defined(invoker.configs)) {
+      library_configs = []
+      library_configs = invoker.configs
+    }
+    target_type = "executable"
+    assert(!defined(cxx_bindings))
+  }
+}
+
+set_defaults("rust_executable") {
+  configs = default_executable_configs
+}
diff --git a/build/rust/rust_macro.gni b/build/rust/rust_macro.gni
new file mode 100644
index 000000000000..427220b9f5e3
--- /dev/null
+++ b/build/rust/rust_macro.gni
@@ -0,0 +1,19 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+ +import("//build/config/rust.gni") +import("//build/rust/rust_target.gni") + +# Template for generating a Rust proc-macro library. Such targets produce a +# dynamic library that is loaded during compilation and used to generate Rust +# code for compilation. +template("rust_macro") { + rust_target(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + # Has rust_target generate a rust_proc_macro GN output. + target_type = "rust_proc_macro" + } +} diff --git a/build/rust/rust_shared_library.gni b/build/rust/rust_shared_library.gni new file mode 100644 index 000000000000..6bea51d09b8d --- /dev/null +++ b/build/rust/rust_shared_library.gni @@ -0,0 +1,26 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_target.gni") + +# Defines a shared_library containing just Rust code. Has the same variables +# available as a rust_static_library. See rust_static_library.gni for +# documentation. +template("rust_shared_library") { + exclude_forwards = TESTONLY_AND_VISIBILITY + [ "configs" ] + rust_target(target_name) { + forward_variables_from(invoker, "*", exclude_forwards) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + if (defined(invoker.configs)) { + library_configs = [] + library_configs = invoker.configs + } + target_type = "shared_library" + crate_type = "cdylib" + } +} + +set_defaults("rust_shared_library") { + configs = default_shared_library_configs +} diff --git a/build/rust/rust_static_library.gni b/build/rust/rust_static_library.gni new file mode 100644 index 000000000000..6512b3491292 --- /dev/null +++ b/build/rust/rust_static_library.gni @@ -0,0 +1,169 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_target.gni") + +# Defines a Rust static library which can be used by downstream Rust or C++ +# targets. This is a single Rust compilation unit consisting of potentially +# multiple .rs files. +# +# We term this 'rust_static_library' because it is used most analogously +# to a C++ 'static_library' in Chromium. Like the C++ one, it can be compiled +# independently into an intermediate linking target. The output contains the +# object file(s) of the GN target's sources, and not its dependencies. +# +# Parameters +# +# sources +# List of source files which this crate is allowed to compile, which is +# used to determine the impact of source code changes on other GN targets. +# This is not used by the Rust compiler, as it discovers source files by +# following `mod` declarations starting at the `crate_root`. The +# discovered source files must match this list. (This is not yet enforced, +# but will be.) +# +# epoch (optional) +# The major version of the library, which is used to differentiate between +# multiple versions of the same library name. This includes all leading 0s +# and the first non-zero value in the crate's version. This should be left +# as the default, which is "0", for first-party code unless there are +# multiple versions of a crate present. For third-party code, the version +# epoch (matching the directory it is found in) should be specified. +# +# Examples: +# 1.0.2 => epoch = "1" +# 4.2.0 => epoch = "4" +# 0.2.7 => epoch = "0.2" +# 0.0.3 => epoch = "0.0.3" +# +# edition (optional) +# Edition of the Rust language to be used. 
+#     Options are "2015", "2018" and "2021". Defaults to "2021".
+#
+#   allow_unsafe (optional)
+#     Set to true to allow unsafe code in this target. Defaults to false.
+#
+#   configs (optional)
+#     A list of config labels (in the GN meaning) applying to this target.
+#
+#   rustflags (optional)
+#     Explicit flags for the rustc command line. (Use 'edition' or 'features'
+#     where possible).
+#
+#   deps (optional)
+#     List of GN targets on which this crate depends. These may be Rust
+#     or non-Rust targets.
+#
+#   public_deps (optional)
+#     List of GN targets on which this crate depends, and which are exported
+#     into the dependency list of any crate that depends on it. Dependency
+#     crates that appear in the public API should be included here.
+#
+#   test_deps (optional)
+#     List of GN targets on which this crate's tests depend, in addition
+#     to deps.
+#
+#   is_gtest_unittests (optional)
+#     Should only be set to true for rlibs of gtest unit tests. This ensures
+#     all objects in the rlib are linked into the final target, rather than
+#     pruning dead code, so that the tests themselves are not discarded by
+#     the linker.
+#
+#   mutually_dependent_target (optional)
+#   mutually_dependent_public_deps (optional)
+#     These are for use by the mixed_target() template.
+#
+#     If this Rust code is intrinsically paired with some C/C++ code,
+#     with bidirectional calls between the two, then this would
+#     be a circular dependency. GN does not allow circular dependencies
+#     (other than for header files per allow_circular_includes_from).
+#     But this is common for a 'component' which has both Rust and C++
+#     code. You should structure things such that the C++ code depends
+#     on the Rust code in the normal way:
+#       static_library("cpp_stuff") {
+#         deps = [ "rust_stuff" ]
+#         # ..
+#       }
+#     but that the Rust target also notes the C++ target using this
+#     'mutually_dependent_target' parameter.
+#       rust_static_library("rust_stuff") {
+#         mutually_dependent_target = "cpp_stuff"
+#         mutually_dependent_public_deps = _cpp_stuff_public_deps
+#         # ..
+#       }
+#
+#     This causes the Rust unit tests, if generated, to depend on the
+#     mutually dependent target, since depending on the Rust code only would
+#     be insufficient. And it allows any C++ bindings generated from the Rust
+#     code to include headers from the mutually_dependent_target by depending
+#     on its public_deps.
+#
+#   build_native_rust_unit_tests (optional)
+#     Builds native unit tests (under #[cfg(test)]) written inside the Rust
+#     crate. This will create a `<name>_unittests` executable in the output
+#     directory when set to true.
+#
+#   unit_test_target (optional)
+#     Overrides the default name for the unit tests target
+#
+#   crate_root (optional)
+#     Location of the crate root.
+#     This defaults to `./src/lib.rs` and should only be changed when
+#     absolutely necessary (such as in the case of generated code).
+#
+#   features (optional)
+#     A list of conditional compilation flags to enable. This can be used
+#     to set features for crates built in-tree which are also published to
+#     crates.io. Each feature in the list will be passed to rustc as
+#     '--cfg feature=XXX'
+#
+#   cxx_bindings (optional)
+#     A list of Rust files which contain #[cxx::bridge] mods and should
+#     therefore have C++ bindings generated. See https://cxx.rs.
+#     This will automatically add appropriate dependencies: there's no
+#     need to depend on the cxx crate or any generated bindings.
+#
+#   visibility (optional)
+#   rustflags (optional)
+#   crate_name (optional)
+#     Per the usual gn meaning for Rust targets.
+# +# inputs (optional) +# Additional input files needed for compilation (such as `include!`ed files) +# +# test_inputs (optional) +# Same as above but for the unit tests target +# +# Example of usage: +# +# rust_static_library("foo_bar") { +# deps = [ +# "//boo/public/rust/bar", +# "//third_party/rust/crates:argh", +# "//third_party/rust/crates:serde", +# "//third_party/rust/crates:slab", +# ] +# sources = [ "src/lib.rs" ] +# } +# +# This template is intended to serve the same purpose as 'rustc_library' +# in Fuchsia. +template("rust_static_library") { + exclude_forwards = TESTONLY_AND_VISIBILITY + [ "configs" ] + _target_name = target_name + + rust_target(_target_name) { + forward_variables_from(invoker, "*", exclude_forwards) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + if (defined(invoker.configs)) { + library_configs = [] + library_configs = invoker.configs + } + target_type = "rust_library" + } +} + +set_defaults("rust_static_library") { + configs = default_compiler_configs +} diff --git a/build/rust/rust_target.gni b/build/rust/rust_target.gni new file mode 100644 index 000000000000..1a2bf1db5491 --- /dev/null +++ b/build/rust/rust_target.gni @@ -0,0 +1,448 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/rust.gni") +import("//build/rust/analyze.gni") +import("//build/rust/rust_unit_test.gni") + +# The //build directory is re-used for non-Chromium products. We do not support +# cxx bindings in such contexts, because //third_party may be missing. +if (build_with_chromium) { + import("//third_party/rust/cxx/chromium_integration/rust_cxx.gni") +} + +# Creates a Rust target (rlib, executable, proc macro etc.) with ability to +# understand some handy variables such as "edition" and "features" and also to +# build any associated unit tests. +# +# Normally, you should not use this directly. Use either +# - cargo_crate.gni - for 3p crates only +# - rust_static_library.gni - for 1p Rust code +# +# Because the common use of this is rust_static_library, all the documentation +# for the supported options is given in rust_static_library.gni. Please refer +# over there. +# +# If you're using rust_target directly, you will also need to specify: +# target_type executable, rust_library etc. per GN norms +# +# There is one area where this differs from `rust_static_library`: configs. +# Here, you must specify `executable_configs` or `library_configs` depending on +# the type of thing you're generating. This is so that different defaults can +# be provided. + +template("rust_target") { + # Only one of `crate_root` or `generate_crate_root` can be specified, or + # neither. + assert(!defined(invoker.crate_root) || + !(defined(invoker.generate_crate_root) && invoker.generate_crate_root)) + + _target_name = target_name + _crate_name = target_name + if (defined(invoker.crate_name)) { + _crate_name = invoker.crate_name + } + + if (defined(invoker.output_dir) && invoker.output_dir != "") { + # This is where the build target (.exe, .rlib, etc) goes. + _output_dir = invoker.output_dir + } + + # This is where the OUT_DIR environment variable points to when running a + # build script and when compiling the build target, for consuming generated + # files. 
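+  # A crate can consume a file that was generated into OUT_DIR with, e.g.
+  # (a sketch of the standard Rust pattern; "generated.rs" is hypothetical):
+  #   include!(concat!(env!("OUT_DIR"), "/generated.rs"));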
+ _env_out_dir = "$target_gen_dir/$_target_name" + + _allow_unsafe = false + if (defined(invoker.allow_unsafe)) { + _allow_unsafe = invoker.allow_unsafe + } + + if (defined(invoker.generate_crate_root) && invoker.generate_crate_root) { + generated_file("${_target_name}_crate_root") { + outputs = [ "${target_gen_dir}/${target_name}.rs" ] + contents = [ + "// Generated crate root for ${_target_name}.", + "// @generated", + "", + ] + foreach(rs, invoker.sources) { + rs_path_from_root = rebase_path(rs, target_gen_dir) + contents += [ "#[path = \"${rs_path_from_root}\"]" ] + + # Drop the file extension from the module name. + rs_modname = string_replace(rs, ".rs", "") + + # Replace invalid "/" chars in the source file path. + rs_modname = string_replace(rs_modname, "/", "_") + + # Since source files are specified relative to the BUILD.gn they may + # also have ".." path components. + rs_modname = string_replace(rs_modname, "..", "dotdot") + contents += [ + "mod ${rs_modname};", + "", + ] + } + } + _crate_root = + string_join("", get_target_outputs(":${_target_name}_crate_root")) + } else if (defined(invoker.crate_root)) { + _crate_root = invoker.crate_root + } else if (invoker.target_type == "executable") { + _crate_root = "src/main.rs" + } else { + _crate_root = "src/lib.rs" + } + + _testonly = false + if (defined(invoker.testonly)) { + _testonly = invoker.testonly + } + if (defined(invoker.visibility)) { + _visibility = invoker.visibility + } + + _use_local_std = use_local_std_by_default + if (defined(invoker.use_local_std)) { + _use_local_std = invoker.use_local_std + } + + _rustflags = [] + if (defined(invoker.rustflags)) { + _rustflags += invoker.rustflags + } + if (defined(invoker.features)) { + foreach(i, invoker.features) { + _rustflags += [ "--cfg=feature=\"${i}\"" ] + } + } + _edition = "2021" + if (defined(invoker.edition)) { + _edition = invoker.edition + } + _configs = [ "//build/rust:edition_${_edition}" ] + _test_configs = [] + if (invoker.target_type == "executable") { + if (defined(invoker.executable_configs)) { + _configs += invoker.executable_configs + } + } else if (invoker.target_type == "rust_proc_macro") { + if (defined(invoker.proc_macro_configs)) { + _configs += invoker.proc_macro_configs + _test_configs += [ "//build/rust:proc_macro_extern" ] + } + } else { + if (defined(invoker.library_configs)) { + _configs += invoker.library_configs + } + } + _forward_to_host_toolchain = false + if (invoker.target_type == "rust_proc_macro") { + if (current_toolchain != host_toolchain_no_sanitizers) { + _forward_to_host_toolchain = true + } + _main_target_suffix = "${target_name}__proc_macro" + } else { + _main_target_suffix = "__rlib" + } + + _deps = [] + if (defined(invoker.deps)) { + _deps += invoker.deps + } + _public_deps = [] + if (defined(invoker.public_deps)) { + _public_deps += invoker.public_deps + } + if (defined(invoker.aliased_deps)) { + _aliased_deps = invoker.aliased_deps + } else { + _aliased_deps = { + } + } + + _is_data_dep = defined(invoker.is_data_dep) && invoker.is_data_dep + + _build_unit_tests = false + if (defined(invoker.build_native_rust_unit_tests)) { + _build_unit_tests = + invoker.build_native_rust_unit_tests && can_build_rust_unit_tests + } + + # Declares that the Rust crate generates bindings between C++ and Rust via the + # Cxx crate. It may generate C++ headers and/or use the cxx crate macros to + # generate Rust code internally, depending on what bindings are declared. If + # set, it's a set of rust files that include Cxx bindings declarations. 
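+  # Such a file declares a bridge mod along these lines (a minimal sketch,
+  # not taken from a real target; see https://cxx.rs for the full syntax):
+  #   #[cxx::bridge]
+  #   mod ffi {
+  #       extern "Rust" {
+  #           fn do_something();
+  #       }
+  #   }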
+ _cxx_bindings = [] + if (defined(invoker.cxx_bindings)) { + assert(build_with_chromium, + "cxx bindings are not supported when building rust targets " + + "outside the Chromium build.") + _cxx_bindings = invoker.cxx_bindings + } + _rustenv = [ "OUT_DIR=" + rebase_path(_env_out_dir) ] + if (defined(invoker.rustenv)) { + _rustenv += invoker.rustenv + } + + # TODO(danakj): This could be a hash generated from the input crate, such as + # from its path, in which case the BUILD.gn would not need to specify + # anything. But GN doesn't give us a hash function to make that easy. + _metadata = "0" + if (defined(invoker.epoch)) { + _metadata = invoker.epoch + } + + # We require that all source files are listed, even though this is + # not a requirement for rustc. The reason is to ensure that tools + # such as `gn deps` give the correct answer, and thus we trigger + # the right test suites etc. on code change. + # TODO(crbug.com/1256930) - verify this is correct + assert(defined(invoker.sources), "sources must be listed") + + if (_forward_to_host_toolchain) { + # Redirect to the host toolchain. + group(_target_name) { + testonly = _testonly + if (defined(_visibility)) { + visibility = _visibility + } + public_deps = [ + ":${_target_name}${_main_target_suffix}($host_toolchain_no_sanitizers)", + ] + } + + not_needed(invoker, "*") + not_needed([ + "_allow_unsafe", + "_build_unit_tests", + "_crate_root", + "_crate_name", + "_cxx_bindings", + "_deps", + "_aliased_deps", + "_is_data_dep", + "_metadata", + "_out_dir", + "_public_deps", + "_rustenv", + "_rustflags", + "_support_use_from_cpp", + "_testonly", + "_use_local_std", + "_visibility", + ]) + } else { + group(_target_name) { + testonly = _testonly + if (defined(_visibility)) { + visibility = _visibility + } + + # Both the C++ bindings (if present) and the Rust crate should be treated + # like direct dependencies, so we expose them both in public_deps. + public_deps = [ ":${_target_name}${_main_target_suffix}" ] + + # TODO(danakj): This would not be needed if we stopped forwarding through + # a group in the common (non-procmacro) case. + if (_is_data_dep) { + data_deps = [ ":${_target_name}${_main_target_suffix}" ] + } + + if (_cxx_bindings != []) { + public_deps += [ ":${_target_name}_cxx_generated" ] + + # Additionally, C++ bindings generated by Cxx can include C++ types + # that come from the Cxx library, such as `rust::Str`. So any C++ + # target that depends on a rust target directly may need access to Cxx + # as well, which means it must appear in public_deps. + public_deps += [ "//build/rust:cxx_cppdeps" ] + + # cxx_cppdeps pulls in the default libstd, so make sure the default was + # not overridden. + assert( + _use_local_std == use_local_std_by_default, + "Rust targets with cxx bindings cannot override the default libstd") + } else if (!defined(invoker.no_std) || !invoker.no_std) { + # If C++ depends on and links in the library, we need to make sure C++ + # links in the Rust stdlib. This is orthogonal to if the library exports + # bindings for C++ to use. + if (_use_local_std) { + deps = [ "//build/rust/std:link_local_std" ] + } else { + assert(prebuilt_libstd_supported, + "Prebuilt Rust stdlib is not available for this target") + deps = [ "//build/rust/std:link_prebuilt_std" ] + } + } + } + + _rust_deps = _deps + _rust_aliased_deps = _aliased_deps + _rust_public_deps = _public_deps + _cxx_deps = _deps + + # The Rust target (and unit tests) need the Cxx crate when using it to + # generate bindings. 
+ if (_cxx_bindings != []) { + _rust_deps += [ "//build/rust:cxx_rustdeps" ] + + # C++ targets can depend on the Rust target from the BUILD.gn file to + # access the headers generated from it + _rust_public_deps += [ ":${_target_name}_cxx_generated" ] + } + + if (!defined(invoker.no_std) || !invoker.no_std) { + if (_use_local_std) { + _rust_deps += [ "//build/rust/std:local_std_for_rustc" ] + } else { + _rust_deps += [ "//build/rust/std:prebuilt_std_for_rustc" ] + } + } else { + not_needed([ "_use_local_std" ]) + } + + # You must go through the groups above to get to these targets. + _visibility = [] + _visibility = [ ":${_target_name}" ] + + target(invoker.target_type, "${_target_name}${_main_target_suffix}") { + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ + "features", + "deps", + "aliased_deps", + "public_deps", + "rustflags", + "rustenv", + "configs", + "unit_test_output_dir", + "unit_test_target", + "test_inputs", + ]) + + testonly = _testonly + visibility = _visibility + crate_name = _crate_name + crate_root = _crate_root + configs = [] + configs = _configs + deps = _rust_deps + aliased_deps = _rust_aliased_deps + public_deps = _rust_public_deps + rustflags = _rustflags + rustflags += [ "-Cmetadata=${_metadata}" ] + rustenv = _rustenv + + # The Rust tool() declarations, like C++ ones, use the output_name and + # output_dir, so that GN targets can override these if needed. Here we + # give them their default values, or allow them to be overridden. + if (defined(_output_dir)) { + output_dir = _output_dir + } + if (!defined(output_name) || output_name == "") { + output_name = crate_name + } + + if (compute_inputs_for_analyze) { + deps += [ ":${_target_name}_analyze" ] + } + + if (!_allow_unsafe) { + configs += [ "//build/rust:forbid_unsafe" ] + } + } + + if (compute_inputs_for_analyze) { + # Find and depend on all rust files in the crate for the purpose of `gn + # analyze`. + analyze_rust("${_target_name}_analyze") { + forward_variables_from(invoker, "*", [ "crate_root" ]) + crate_root = _crate_root + } + } + + if (_cxx_bindings != []) { + rust_cxx("${_target_name}_cxx_generated") { + testonly = _testonly + visibility = [ ":${_target_name}${_main_target_suffix}" ] + if (defined(_visibility)) { + visibility += _visibility + } + sources = _cxx_bindings + deps = _cxx_deps + _public_deps + + if (is_component_build) { + # In a component_build the cxx bindings may be linked into a shared + # library at any point up the dependency tree, so always export. 
+ export_symbols = true + } else if (invoker.target_type == "shared_library") { + export_symbols = true + } else { + export_symbols = false + } + } + } else { + not_needed([ "_cxx_deps" ]) + } + + if (_build_unit_tests) { + _unit_test_target = "${_target_name}_unittests" + if (defined(invoker.unit_test_target)) { + _unit_test_target = invoker.unit_test_target + } + + rust_unit_test(_unit_test_target) { + forward_variables_from(invoker, [ "sources" ]) + testonly = true + crate_root = _crate_root + rustflags = _rustflags + env_out_dir = _env_out_dir + if (defined(invoker.unit_test_output_dir)) { + output_dir = invoker.unit_test_output_dir + } + deps = _rust_deps + _public_deps + aliased_deps = _rust_aliased_deps + public_deps = [ ":${_target_name}" ] + if (defined(invoker.test_deps)) { + deps += invoker.test_deps + } + inputs = [] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + } + if (defined(invoker.test_inputs)) { + inputs += invoker.test_inputs + } + if (defined(invoker.executable_configs)) { + configs = [] + configs = invoker.executable_configs + } else if (!defined(configs)) { + configs = [] + } + configs += _test_configs + rustenv = _rustenv + + if (!_allow_unsafe) { + configs += [ "//build/rust:forbid_unsafe" ] + } + } + } else { + not_needed([ + "_crate_root", + "_crate_name", + "_metadata", + "_test_configs", + ]) + } + } +} + +set_defaults("rust_target") { + executable_configs = default_executable_configs + library_configs = default_compiler_configs + proc_macro_configs = default_rust_proc_macro_configs +} diff --git a/build/rust/rust_unit_test.gni b/build/rust/rust_unit_test.gni new file mode 100644 index 000000000000..9bb3055e74e6 --- /dev/null +++ b/build/rust/rust_unit_test.gni @@ -0,0 +1,138 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/rust.gni") +import("//build/rust/rust_unit_tests_group.gni") + +# Defines a Rust unit test. +# +# This generates an executable + a script that can be run on Chromium bots. +# Future iterations of this template may do something smarter with the test +# code in order to automatically contribute it to test steps on the bots. +# +# Parameters +# +# sources +# edition (optional) +# allow_unsafe (optional) +# configs (optional) +# deps (optional) +# crate_root (optional) +# features (optional) +# rustflags (optional) +# inputs (optional) +# All as in rust_static_library. +# +# Example of usage: +# +# rust_unit_test("foo_tests") { +# deps = [ +# "//third_party/rust/test_utils/v1:lib", +# ] +# sources = [ "src/lib.rs" ] +# } +# +# Implementation note: you might assume it makes sense to implement this +# in terms of rust_target in order to avoid the duplication of logic around +# features and editions. We don't do that because rust_target actually +# depends on this template in order to build embedded unit tests +# (and therefore depending on rust_target here would lead to an infinite +# import loop). 
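+#
+# Note: the wrapper generated via rust_unit_tests_group (see below) lands at
+# $root_out_dir/bin/run_<target_name>, so the example above can be run
+# locally as out/Default/bin/run_foo_tests.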
+ +template("rust_unit_test") { + assert(can_build_rust_unit_tests) + if (defined(invoker.crate_name)) { + _crate_name = invoker.crate_name + } else { + _crate_name = target_name + } + if (defined(invoker.crate_root)) { + _crate_root = invoker.crate_root + } else { + _crate_root = "src/lib.rs" + } + _rustflags = invoker.rustflags + if (defined(invoker.features)) { + foreach(i, invoker.features) { + _rustflags += [ "--cfg=feature=\"${i}\"" ] + } + } + _configs = invoker.configs + _edition = "2021" + if (defined(invoker.edition)) { + _edition = invoker.edition + } + _configs += [ "//build/rust:edition_${_edition}" ] + + # We require that all source files are listed, even though this is + # not a requirement for rustc. The reason is to ensure that tools + # such as `gn deps` give the correct answer, and thus we trigger + # the right test suites etc. on code change. + # TODO(crbug.com/1256930) - verify this is correct + assert(defined(invoker.sources), "sources must be listed") + + _exe_target_name = target_name + "_exe" + rust_unit_tests_group(target_name) { + deps = [ ":$_exe_target_name" ] + } + + # The OUT_DIR for a crate's tests should point to the same OUT_DIR that the + # library it's testing used. The `env_out_dir` variable can be used to specify + # that directory. + if (defined(invoker.env_out_dir)) { + _env_out_dir = invoker.env_out_dir + } else { + _env_out_dir = target_gen_dir + } + + # TODO(crbug.com/1229320): Arrange to run test executables on try bots. + # TODO(crbug.com/gn/146): Allow Rust executables to depend on C/C++ source + # sets. + # This is important in cases where Rust tests may depend upon C/C++ + # dependencies. + executable(_exe_target_name) { + testonly = true + forward_variables_from(invoker, + "*", + [ + "allow_unsafe", + "edition", + "features", + "rustflags", + "configs", + "crate_name", + "crate_root", + "env_out_dir", + ]) + if (!defined(output_name) || output_name == "") { + output_name = _crate_name + } + + rustflags = [ + "--cfg", + "feature=\"test\"", + "--test", + ] + rustflags += _rustflags + configs = [] + configs = _configs + crate_name = _crate_name + crate_root = _crate_root + if (!defined(rustenv)) { + rustenv = [] + } + + rustenv += [ "OUT_DIR=" + rebase_path(_env_out_dir) ] + metadata = { + # Consumed by "rust_unit_tests_group" gni template. + rust_unit_test_executables = [ _crate_name ] + } + } +} + +set_defaults("rust_unit_test") { + configs = default_executable_configs + deps = [] + rustflags = [] +} diff --git a/build/rust/rust_unit_tests_group.gni b/build/rust/rust_unit_tests_group.gni new file mode 100644 index 000000000000..c2cdfe4d97a9 --- /dev/null +++ b/build/rust/rust_unit_tests_group.gni @@ -0,0 +1,93 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Defines a Rust unit tests group. +# +# This generates a script that wraps 1 or more Rust unit test executables. +# Such script would typically wrap all Rust unit tests from a set of related +# crates (e.g. all crates under //base). +# +# The script is primarily useful to enable running the tests on Chromium bots, +# but it is also a convenience for having a single entry point for running +# the tests locally (without having to manually fan out to all the individual +# executables). +# +# Parameters: +# +# deps - Will be recursively traversed to discover all the Rust unit test +# executables. 
+# +# Example usage: +# +# # This will generate a script at out/Default/bin/run_foo_tests (or +# # run_foo_tests.bat on Windows) that wraps the executables containing +# # native Rust unit tests: +# # * out/Default/foo_crate1_unittests +# # * out/Default/foo_mixed_source_set2_rs_unittests +# # * out/Default/foo_mixed_source_set3_rs_unittests +# rust_unit_tests_group("foo_tests") { +# deps = [ +# "foo_crate1", +# "foo_mixed_source_set2", +# "foo_mixed_source_set3", +# ] +# } + +template("rust_unit_tests_group") { + assert(defined(invoker.deps), "deps must be listed") + + # As noted in the top-level comment of //testing/buildbot/gn_isolate_map.pyl + # the script *must* be in output_dir/bin/run_$target (or + # output_dir\bin\run_$target.bat on Windows). + bat = "" + if (is_win) { + bat = ".bat" + } + _script_filepath = "$root_out_dir/bin/run_${target_name}${bat}" + + # Gathering metadata provided by the rust_unit_test gni template from all of + # our dependencies. + _metadata_target_name = "${target_name}_metadata" + _metadata_filepath = "$root_build_dir/${target_name}__rust_unittest_exes.txt" + generated_file(_metadata_target_name) { + forward_variables_from(invoker, [ "deps" ], []) + + testonly = true + outputs = [ _metadata_filepath ] + data_keys = [ "rust_unit_test_executables" ] + } + + # Generating a script that can run all of the wrapped Rust unit test + # executables. + action(target_name) { + forward_variables_from(invoker, "*", []) + + testonly = true + script = "//testing/scripts/rust/generate_script.py" + inputs = [ _metadata_filepath ] + outputs = [ _script_filepath ] + + data = [ _script_filepath ] + + if (!defined(data_deps)) { + data_deps = [] + } + data_deps += [ "//testing/scripts/rust" ] + data_deps += deps + + deps += [ ":$_metadata_target_name" ] + + args = [ + "--rust-test-executables", + rebase_path(_metadata_filepath, root_build_dir), + "--exe-dir", + rebase_path(root_out_dir, root_build_dir), + "--script-path", + rebase_path(_script_filepath, root_build_dir), + ] + if (is_win) { + args += [ "--make-bat" ] + } + } +} diff --git a/build/rust/rustc_wrapper.py b/build/rust/rustc_wrapper.py new file mode 100755 index 000000000000..212ad44d023b --- /dev/null +++ b/build/rust/rustc_wrapper.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python3 + +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import pathlib +import subprocess +import os +import sys +import re + +# Set up path to be able to import action_helpers. +sys.path.append( + os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, + os.pardir, 'build')) +import action_helpers + +# This script wraps rustc for (currently) these reasons: +# * To work around some ldflags escaping performed by ninja/gn +# * To remove dependencies on some environment variables from the .d file. +# * To enable use of .rsp files. +# * To work around two gn bugs on Windows +# +# LDFLAGS ESCAPING +# +# This script performs a simple function to work around some of the +# parameter escaping performed by ninja/gn. +# +# rustc invocations are given access to {{rustflags}} and {{ldflags}}. +# We want to pass {{ldflags}} into rustc, using -Clink-args="{{ldflags}}". +# Unfortunately, ninja assumes that each item in {{ldflags}} is an +# independent command-line argument and will have escaped them appropriately +# for use on a bare command line, instead of in a string. 
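+#
+# (For example, hypothetical {{ldflags}} of `-Wl,--as-needed -lm` arrive here
+# as two separate argv entries rather than as one quoted string.)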
+#
+# This script converts such {{ldflags}} into individual -Clink-arg=X
+# arguments to rustc.
+#
+# RUSTENV dependency stripping
+#
+# When Rust code depends on an environment variable at build-time
+# (using the env! macro), rustc spots that and adds it to the .d file.
+# Ninja then parses that .d file and determines that the environment
+# dependency means that the target always needs to be rebuilt.
+#
+# That's all correct, but _we_ know that some of these environment
+# variables (typically, all of them) are set by .gn files which ninja
+# tracks independently. So we remove them from the .d file.
+#
+# RSP files:
+#
+# We want to put the ninja/gn variables {{rustdeps}} and {{externs}}
+# in an RSP file. Unfortunately, they are space-separated variables
+# but Rust requires a newline-separated input. This script duly makes
+# the adjustment. This works around a gn issue:
+# TODO(https://bugs.chromium.org/p/gn/issues/detail?id=249): fix this
+#
+# WORKAROUND WINDOWS BUGS:
+#
+# On Windows platforms, this temporarily works around some issues in gn.
+# See comments inline, linking to the relevant gn fixes.
+#
+# Usage:
+#   rustc_wrapper.py --rustc <path to rustc> --depfile <path to .d file>
+#      -- <normal rustc args> LDFLAGS {{ldflags}} RUSTENV {{rustenv}}
+# The LDFLAGS token is discarded, and everything after that is converted
+# to being a series of -Clink-arg=X arguments, until or unless RUSTENV
+# is encountered, after which those are interpreted as environment
+# variables to pass to rustc (and which will be removed from the .d file).
+#
+# Both LDFLAGS and RUSTENV **MUST** be specified, in that order, even if
+# the list following them is empty.
+#
+# TODO(https://github.com/rust-lang/rust/issues/73632): avoid using rustc
+# for linking in the first place. Most of our binaries are linked using
+# clang directly, but there are some types of Rust build product which
+# must currently be created by rustc (e.g. unit test executables). As
+# part of support for using non-rustc linkers, we should arrange to extract
+# such functionality from rustc so that we can make all types of binary
+# using our clang toolchain. That will remove the need for most of this
+# script.
+
+
+# Equivalent of the python3.9 built-in str.removesuffix().
+def remove_lib_suffix_from_l_args(text):
+  if text.startswith("-l") and text.endswith(".lib"):
+    return text[:-len(".lib")]
+  return text
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--rustc', required=True, type=pathlib.Path)
+  parser.add_argument('--depfile', type=pathlib.Path)
+  parser.add_argument('--rsp', type=pathlib.Path)
+  parser.add_argument('args', metavar='ARG', nargs='+')
+
+  args = parser.parse_args()
+
+  remaining_args = args.args
+
+  ldflags_separator = remaining_args.index("LDFLAGS")
+  rustenv_separator = remaining_args.index("RUSTENV", ldflags_separator)
+  rustc_args = remaining_args[:ldflags_separator]
+  ldflags = remaining_args[ldflags_separator + 1:rustenv_separator]
+  rustenv = remaining_args[rustenv_separator + 1:]
+
+  is_windows = os.name == 'nt'
+
+  rustc_args.extend(["-Clink-arg=%s" % arg for arg in ldflags])
+
+  # Workaround for https://bugs.chromium.org/p/gn/issues/detail?id=249
+  if args.rsp:
+    with open(args.rsp) as rspfile:
+      rsp_args = [l.rstrip() for l in rspfile.read().split(' ') if l.rstrip()]
+    if is_windows:
+      # Work around for hard-coded string in gn; full fix will come from
+      # https://gn-review.googlesource.com/c/gn/+/12460
+      rsp_args = [arg for arg in rsp_args if not arg.endswith("-Bdynamic")]
+      # Work around for "-l<name>.lib", where the ".lib" suffix is undesirable.
+      # Full fix will come from
+      # https://gn-review.googlesource.com/c/gn/+/12480
+      rsp_args = [remove_lib_suffix_from_l_args(arg) for arg in rsp_args]
+    with open(args.rsp, 'w') as rspfile:
+      rspfile.write("\n".join(rsp_args))
+    rustc_args.append(f'@{args.rsp}')
+
+  env = os.environ.copy()
+  fixed_env_vars = []
+  for item in rustenv:
+    (k, v) = item.split("=", 1)
+    env[k] = v
+    fixed_env_vars.append(k)
+
+  r = subprocess.run([args.rustc, *rustc_args], env=env, check=False)
+  if r.returncode != 0:
+    sys.exit(r.returncode)
+
+  # Now edit the depfile produced
+  if args.depfile is not None:
+    env_dep_re = re.compile("# env-dep:(.*)=.*")
+    replacement_lines = []
+    dirty = False
+    with open(args.depfile, encoding="utf-8") as d:
+      for line in d:
+        m = env_dep_re.match(line)
+        if m and m.group(1) in fixed_env_vars:
+          dirty = True  # skip this line
+        else:
+          replacement_lines.append(line)
+    if dirty:  # we made a change, let's write out the file
+      with action_helpers.atomic_output(args.depfile) as output:
+        # Lines read from the depfile keep their trailing newlines, so join
+        # with the empty string rather than "\n" to avoid inserting blanks.
+        output.write("".join(replacement_lines).encode("utf-8"))
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/rust/std/BUILD.gn b/build/rust/std/BUILD.gn
new file mode 100644
index 000000000000..6ae92a31f296
--- /dev/null
+++ b/build/rust/std/BUILD.gn
@@ -0,0 +1,346 @@
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file provides the ability for our C++ toolchain to successfully
+# link binaries containing arbitrary Rust code.
+#
+# By "arbitrary Rust code" I mean .rlib archives full of Rust code; an
+# .rlib is actually a static archive.
+#
+# Those static libraries don't link as-is into a final executable because
+# they're designed for downstream processing by further invocations of rustc
+# which link into a final binary. That final invocation of rustc knows how
+# to do two things:
+# * Find the Rust standard library.
+# * Remap some generic allocator symbols to the specific allocator symbols
+#   in use.
+# This file takes care of equivalent tasks for our C++ toolchains.
+# C++ targets should depend upon either link_local_std or
+# link_prebuilt_std to ensure that Rust code can be linked into their
+# C++ executables.
+#
+# This is obviously a bit fragile - rustc might do other magic in future.
+# But, linking with a final C++ toolchain is something often needed, and
+# https://github.com/rust-lang/rust/issues/64191 aims to make this
+# officially possible.
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/rust.gni")
+
+if (toolchain_has_rust) {
+  # Equivalent of allocator symbols injected by rustc.
+  source_set("remap_alloc") {
+    sources = [
+      "immediate_crash.h",
+      "remap_alloc.cc",
+    ]
+  }
+
+  # List of Rust stdlib rlibs which are present in the official Rust toolchain
+  # we are using from the Android team. This is usually a version or two behind
+  # nightly. Generally this matches the toolchain we build ourselves, but if
+  # they differ, append or remove libraries based on the
+  # `use_chromium_rust_toolchain` GN variable.
+  #
+  # If the build fails due to missing symbols, it would be because of a missing
+  # library that needs to be added here in a newer stdlib.
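+  # (Hypothetical example: if a newer stdlib split some code out into a new
+  # `foo` rlib, C++ links would fail with unresolved Rust symbols until "foo"
+  # was appended to this list.)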
+ stdlib_files = [ + "std", # List first because it makes depfiles more debuggable (see below) + "addr2line", + "adler", + "alloc", + "cfg_if", + "compiler_builtins", + "core", + "getopts", + "gimli", + "hashbrown", + "libc", + "memchr", + "miniz_oxide", + "object", + "panic_abort", + "panic_unwind", + "rustc_demangle", + "std_detect", + "test", + "unicode_width", + "unwind", + ] + + if (is_win) { + # Our C++ builds already link against a wide variety of Windows API import libraries, + # but the Rust stdlib requires a few extra. + _windows_lib_deps = [ + "bcrypt.lib", + "ntdll.lib", + "userenv.lib", + ] + } + + # rlibs explicitly ignored when copying prebuilt sysroot libraries. + # find_std_rlibs.py rightfully errors out if an unexpected prebuilt lib is + # encountered, since it usually indicates we missed something. This ignore + # list is also passed to it. This has no effect on the local std build. + ignore_stdlib_files = [] + + # proc_macro is special: we only run proc macros on the host, so we only want + # it for our host toolchain. + if (current_toolchain == host_toolchain_no_sanitizers) { + # Directs the local_std_for_rustc target to depend on proc_macro, and + # includes proc_macro in the prebuilts copied in find_stdlib. Otherwise it + # is not built or copied. + stdlib_files += [ "proc_macro" ] + } else { + # Explicitly ignore it from the prebuilts. Nothing needs to be done for the + # local std build. + ignore_stdlib_files += [ "proc_macro" ] + } + + # Different Rust toolchains may add or remove files relative to the above + # list. That can be specified in gn args for anyone using (for instance) + # nightly or some other experimental toolchain, prior to it becoming official. + stdlib_files -= removed_rust_stdlib_libs + stdlib_files += added_rust_stdlib_libs + + # rlib files which are distributed alongside Rust's prebuilt stdlib, but we + # don't need to pass to the C++ linker because they're used for specialized + # purposes. + skip_stdlib_files = [ + "profiler_builtins", + "rustc_std_workspace_alloc", + "rustc_std_workspace_core", + "rustc_std_workspace_std", + ] + if (prebuilt_libstd_supported) { + action("find_stdlib") { + # Collect prebuilt Rust libraries from toolchain package and copy to a known + # location. + # + # The Rust toolchain contains prebuilt rlibs for the standard library and + # its dependencies. However, they have unstable names: an unpredictable + # metadata hash is appended to the known crate name. + # + # We must depend on these rlibs explicitly when rustc is not in charge of + # linking. However, it is difficult to construct GN rules to do so when the + # names can't be known statically. + # + # This action copies the prebuilt rlibs to a known location, removing the + # metadata part of the name. In the process it verifies we have all the + # libraries we expect and none that we don't. A depfile is generated so this + # step is re-run when any libraries change. The action script additionally + # verifies rustc matches the expected version, which is unrelated but this + # is a convenient place to do so. + # + # The action refers to `stdlib_files`, `skip_stdlib_files`, and the + # associated //build/config/rust.gni vars `removed_rust_stdlib_libs` and + # `added_rust_stdlib_libs` for which rlib files to expect. + # `extra_sysroot_libs` is also used to copy non-std libs, if any. 
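+      #
+      # As an illustration (the hash here is made up), the prebuilt
+      #   <toolchain>/lib/rustlib/<target>/lib/libstd-a1b2c3d4e5f6a7b8.rlib
+      # is copied to
+      #   <target_out_dir>/libstd.rlib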
+      script = "find_std_rlibs.py"
+      depfile = "$target_out_dir/stdlib.d"
+      out_libdir = rebase_path(target_out_dir, root_build_dir)
+      out_depfile = rebase_path(depfile, root_build_dir)
+
+      # For the rustc sysroot we must include even the rlibs we don't pass to
+      # the C++ linker.
+      all_stdlibs_to_copy = stdlib_files + skip_stdlib_files
+      args = [
+        "--rust-bin-dir",
+        rebase_path("${rust_sysroot}/bin", root_build_dir),
+        "--output",
+        out_libdir,
+        "--depfile",
+        out_depfile,
+
+        # Due to limitations in Ninja's handling of .d files, we have to pick
+        # *the first* of our outputs. To make diagnostics more obviously
+        # related to the Rust standard library, we ensure libstd.rlib is
+        # first.
+        "--depfile-target",
+        stdlib_files[0],
+
+        # Create a dependency on the rustc version so this action is re-run
+        # when it changes. This argument is not actually read by the script.
+        "--rustc-revision",
+        rustc_revision,
+      ]
+
+      if (!use_unverified_rust_toolchain) {
+        args += [
+          "--stdlibs",
+          string_join(",", all_stdlibs_to_copy),
+        ]
+
+        if (ignore_stdlib_files != []) {
+          args += [
+            "--ignore-stdlibs",
+            string_join(",", ignore_stdlib_files),
+          ]
+        }
+      }
+
+      if (extra_sysroot_libs != []) {
+        args += [
+          "--extra-libs",
+          string_join(",", extra_sysroot_libs),
+        ]
+      }
+
+      args += [
+        "--target",
+        rust_abi_target,
+      ]
+
+      outputs = []
+      foreach(lib, all_stdlibs_to_copy) {
+        outputs += [ "$target_out_dir/lib$lib.rlib" ]
+      }
+      foreach(lib, extra_sysroot_libs) {
+        outputs += [ "$target_out_dir/$lib" ]
+      }
+    }
+  } else {
+    not_needed([ "ignore_stdlib_files" ])
+  }
+
+  # Construct sysroots for rustc invocations to better control what libraries
+  # are linked. We have two: one with copied prebuilt libraries, and one with
+  # our locally-built std. Both reside in root_out_dir: we must only have one
+  # of each per GN toolchain anyway.
+
+  sysroot_lib_subdir = "lib/rustlib/$rust_abi_target/lib"
+
+  if (prebuilt_libstd_supported) {
+    prebuilt_rustc_sysroot = "$root_out_dir/prebuilt_rustc_sysroot"
+    copy("prebuilt_rustc_sysroot") {
+      deps = [ ":find_stdlib" ]
+      sources = get_target_outputs(":find_stdlib")
+      outputs =
+          [ "$prebuilt_rustc_sysroot/$sysroot_lib_subdir/{{source_file_part}}" ]
+    }
+
+    config("prebuilt_stdlib_for_rustc") {
+      # Match the output directory of :prebuilt_rustc_sysroot
+      sysroot = rebase_path(prebuilt_rustc_sysroot, root_build_dir)
+      rustflags = [ "--sysroot=$sysroot" ]
+    }
+
+    # Use the sysroot generated by :prebuilt_rustc_sysroot. Almost all Rust
+    # targets should depend on this.
+    group("prebuilt_std_for_rustc") {
+      assert(
+          enable_rust,
+          "Some C++ target is including Rust code even though enable_rust=false")
+      all_dependent_configs = [ ":prebuilt_stdlib_for_rustc" ]
+      deps = [ ":prebuilt_rustc_sysroot" ]
+    }
+
+    config("prebuilt_rust_stdlib_config") {
+      ldflags = []
+      lib_dir = rebase_path("$prebuilt_rustc_sysroot/$sysroot_lib_subdir",
+                            root_build_dir)
+
+      # We're unable to make these files regular gn dependencies because
+      # they're prebuilt. Instead, we'll pass them in the ldflags. This doesn't
+      # work for all types of build because ldflags propagate differently from
+      # actual dependencies and therefore can end up in different targets from
+      # the remap_alloc.cc above. For example, in a component build, we might
+      # apply the remap_alloc.cc file and these ldflags to shared object A,
+      # while shared object B (that depends upon A) might get only the ldflags
+      # but not remap_alloc.cc, and thus the build will fail.
+      # There is currently no known solution to this for the prebuilt
+      # stdlib - this problem does not apply with configurations where we
+      # build the stdlib ourselves, which is what we'll use in production.
+      foreach(lib, stdlib_files) {
+        this_file = "$lib_dir/lib$lib.rlib"
+        ldflags += [ this_file ]
+      }
+      if (is_win) {
+        # TODO(crbug.com/1434092): This should really be `libs`, however that
+        # breaks. Normally, we specify lib files with the `.lib` suffix but
+        # then when rustc links an EXE, it invokes lld-link with `.lib.lib`
+        # instead.
+        #
+        # Omitting the `.lib` suffix breaks linking as well, when clang drives
+        # the linking step of a C++ EXE that depends on Rust.
+        ldflags += _windows_lib_deps
+      }
+    }
+
+    # Provides std libs to non-rustc linkers.
+    group("link_prebuilt_std") {
+      assert(
+          enable_rust,
+          "Some C++ target is including Rust code even though enable_rust=false")
+      all_dependent_configs = [ ":prebuilt_rust_stdlib_config" ]
+      deps = [
+        ":prebuilt_rustc_sysroot",
+        ":remap_alloc",
+      ]
+    }
+  }
+
+  if (local_libstd_supported) {
+    local_rustc_sysroot = "$root_out_dir/local_rustc_sysroot"
+
+    # All std targets starting with core build with our sysroot. It starts
+    # empty and is incrementally built. The directory must exist at the start.
+    generated_file("empty_sysroot_for_std_build") {
+      outputs = [ "$local_rustc_sysroot/$sysroot_lib_subdir/.empty" ]
+      contents = ""
+    }
+
+    config("local_stdlib_for_rustc") {
+      sysroot = rebase_path(local_rustc_sysroot, root_build_dir)
+      rustflags = [ "--sysroot=$sysroot" ]
+    }
+
+    # Target to be depended on by std build targets. Creates the initially
+    # empty sysroot.
+    group("std_build_deps") {
+      deps = [ ":empty_sysroot_for_std_build" ]
+      public_configs = [ ":local_stdlib_for_rustc" ]
+    }
+
+    # Use the sysroot generated by :local_rustc_sysroot, which transitively
+    # builds std. Only for use in specific tests for now.
+    group("local_std_for_rustc") {
+      assert(
+          enable_rust,
+          "Some C++ target is including Rust code even though enable_rust=false")
+      all_dependent_configs = [ ":local_stdlib_for_rustc" ]
+
+      deps = []
+      foreach(libname, stdlib_files + skip_stdlib_files) {
+        deps += [ "rules:$libname" ]
+      }
+    }
+
+    config("local_rust_stdlib_config") {
+      if (is_win) {
+        # TODO(crbug.com/1434092): This should really be `libs`, however that
+        # breaks. Normally, we specify lib files with the `.lib` suffix but
+        # then when rustc links an EXE, it invokes lld-link with `.lib.lib`
+        # instead.
+        #
+        # Omitting the `.lib` suffix breaks linking as well, when clang drives
+        # the linking step of a C++ EXE that depends on Rust.
+        ldflags = _windows_lib_deps
+      }
+    }
+
+    # TODO(crbug.com/1368806): rework this so that when using locally-built
+    # std, we don't link the prebuilt std as well.
+
+    group("link_local_std") {
+      assert(
+          enable_rust,
+          "Some C++ target is including Rust code even though enable_rust=false")
+      all_dependent_configs = [ ":local_rust_stdlib_config" ]
+      deps = [
+        ":local_std_for_rustc",
+        ":remap_alloc",
+      ]
+    }
+  }
+}
diff --git a/build/rust/std/fake_root/.cargo/config.toml b/build/rust/std/fake_root/.cargo/config.toml
new file mode 100644
index 000000000000..72e14991cc22
--- /dev/null
+++ b/build/rust/std/fake_root/.cargo/config.toml
@@ -0,0 +1,5 @@
+[source.crates-io]
+replace-with = 'vendored-sources'
+
+[source.vendored-sources]
+directory = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor'
diff --git a/build/rust/std/fake_root/.gitignore b/build/rust/std/fake_root/.gitignore
new file mode 100644
index 000000000000..e9e21997b1ac
--- /dev/null
+++ b/build/rust/std/fake_root/.gitignore
@@ -0,0 +1,2 @@
+/target/
+/Cargo.lock
diff --git a/build/rust/std/fake_root/Cargo.toml b/build/rust/std/fake_root/Cargo.toml
new file mode 100644
index 000000000000..55f3a079a337
--- /dev/null
+++ b/build/rust/std/fake_root/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "fake_root"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+test = { path = "../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/test" }
+
+[dependencies.std]
+path = "../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/std"
+features = ["backtrace", "profiler"]
+
+[patch.crates-io]
+rustc-std-workspace-core = { path = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-core' }
+rustc-std-workspace-alloc = { path = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-alloc' }
+rustc-std-workspace-std = { path = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-std' }
diff --git a/build/rust/std/fake_root/README.md b/build/rust/std/fake_root/README.md
new file mode 100644
index 000000000000..754a4b6b91bd
--- /dev/null
+++ b/build/rust/std/fake_root/README.md
@@ -0,0 +1,2 @@
+This package is used to discover the libstd deps using `cargo metadata`. gnrt
+uses it when generating libstd GN bindings.
diff --git a/build/rust/std/fake_root/src/main.rs b/build/rust/std/fake_root/src/main.rs
new file mode 100644
index 000000000000..2c54a522899c
--- /dev/null
+++ b/build/rust/std/fake_root/src/main.rs
@@ -0,0 +1,3 @@
+// Copyright 2023 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
diff --git a/build/rust/std/find_std_rlibs.py b/build/rust/std/find_std_rlibs.py
new file mode 100755
index 000000000000..85ab477a9450
--- /dev/null
+++ b/build/rust/std/find_std_rlibs.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python3
+
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# See BUILD.gn in this directory for an explanation of what this script is for.
+
+import argparse
+import os
+import stat
+import sys
+import shutil
+import subprocess
+import re
+
+from collections import defaultdict
+
+EXPECTED_STDLIB_INPUT_REGEX = re.compile(r"([0-9a-z_]+)(?:-([0-9]+))?$")
+RLIB_NAME_REGEX = re.compile(r"lib([0-9a-z_]+)-([0-9a-f]+)\.rlib$")
+
+
+def main():
+  parser = argparse.ArgumentParser("find_std_rlibs.py")
+  parser.add_argument("--rust-bin-dir",
+                      help="Path to Rust binaries",
+                      required=True)
+  parser.add_argument("--target", help="Rust target triple", required=False)
+  parser.add_argument("--output",
+                      help="Path to rlibs without suffixes",
+                      required=True)
+  parser.add_argument("--depfile", help="Path to write depfile", required=True)
+  parser.add_argument("--depfile-target",
+                      help="Target to key depfile around",
+                      required=True)
+  parser.add_argument("--stdlibs",
+                      help="Expected list of standard library libraries")
+  parser.add_argument("--ignore-stdlibs",
+                      help="List of sysroot libraries to ignore")
+  parser.add_argument("--extra-libs",
+                      help="List of extra non-libstd sysroot libraries")
+  parser.add_argument("--rustc-revision",
+                      help="Not used, just passed from GN to add a dependency"
+                      " on the rustc version.")
+  args = parser.parse_args()
+
+  # Expected rlibs by concise name (the crate name, plus a disambiguating
+  # suffix e.g. "-2" when necessary).
+  if args.stdlibs:
+    rlibs_expected = set()
+    for lib in args.stdlibs.split(','):
+      # The version is only included if there's more than one of `name`, and
+      # even then is only included for the 2nd onward.
+      (name, version) = EXPECTED_STDLIB_INPUT_REGEX.match(lib).group(1, 2)
+      if version is None:
+        rlibs_expected.add(name)
+      else:
+        rlibs_expected.add(f"{name}-{version}")
+    ignore_rlibs = set()
+    if args.ignore_stdlibs is not None:
+      ignore_rlibs = set(args.ignore_stdlibs.split(','))
+  else:
+    rlibs_expected = None
+
+  extra_libs = set()
+  if args.extra_libs:
+    for lib in args.extra_libs.split(','):
+      extra_libs.add(lib)
+
+  # Ask rustc where to find the stdlib for this target.
+  rustc = os.path.join(args.rust_bin_dir, "rustc")
+  rustc_args = [rustc, "--print", "target-libdir"]
+  if args.target:
+    rustc_args.extend(["--target", args.target])
+  rustlib_dir = subprocess.check_output(rustc_args).rstrip().decode()
+
+  # Copy the rlibs to a predictable location. Whilst we're doing so,
+  # also write a .d file so that ninja knows it doesn't need to do this
+  # again unless the source rlibs change.
+  # Format:
+  #   <output path>/lib<lib>.rlib: <path to each copied input rlib>
+  with open(args.depfile, 'w') as depfile:
+    # Ninja isn't versatile at understanding depfiles. We have to say that a
+    # single output depends on all the inputs. We choose any one of the
+    # output rlibs for that purpose. If any of the input rlibs change, ninja
+    # will run this script again and we'll copy them all afresh.
+    depfile.write(
+        "%s:" % (os.path.join(args.output, "lib%s.rlib" % args.depfile_target)))
+
+    def copy_file(infile, outfile):
+      depfile.write(f" {infile}")
+      if (not os.path.exists(outfile)
+          or os.stat(infile).st_mtime != os.stat(outfile).st_mtime):
+        if os.path.exists(outfile):
+          st = os.stat(outfile)
+          os.chmod(outfile, st.st_mode | stat.S_IWUSR)
+        shutil.copy(infile, outfile)
+
+    # Each rlib is named "lib<crate_name>-<metadata>.rlib". The metadata
+    # disambiguates multiple crates of the same name. We want to throw away the
+    # metadata and use stable names. To do so, we replace the metadata bit with
+    # a simple number 1, 2, etc. It doesn't matter how we assign these numbers
+    # as long as it's consistent for a particular set of rlibs.
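+    # For example (hashes made up): libfoo-1a2b.rlib and libfoo-3c4d.rlib,
+    # in that sorted order, are copied to libfoo.rlib and libfoo-2.rlib
+    # respectively.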
+
+    # The rlib names present in the Rust distribution, including metadata. We
+    # sort this list so crates of the same name are ordered by metadata. Also
+    # filter out names that aren't rlibs.
+    rlibs_present = [
+        name for name in os.listdir(rustlib_dir) if name.endswith('.rlib')
+    ]
+    rlibs_present.sort()
+
+    # Keep a count of the instances of each crate name we have seen, so we can
+    # disambiguate the rlibs with an incrementing number at the end.
+    rlibs_seen = defaultdict(lambda: 0)
+
+    for f in rlibs_present:
+      # As standard, Rust includes a hash on the end of each filename
+      # representing certain metadata, to ensure that clients will link
+      # against the correct version. As gn will be manually passing
+      # the correct file path to our linker invocations, we don't need
+      # that, and it would prevent us having the predictable filenames
+      # which we need for statically computable gn dependency rules.
+      (crate_name, metadata) = RLIB_NAME_REGEX.match(f).group(1, 2)
+
+      # Use the number of times we've seen this name to disambiguate the output
+      # filenames. Since we sort the input filenames including the metadata,
+      # this will be the same every time.
+      #
+      # Only append the times seen if it is greater than 1. This allows the
+      # BUILD.gn file to avoid adding '-1' to every name if there's only one
+      # version of a particular one.
+      rlibs_seen[crate_name] += 1
+      if rlibs_seen[crate_name] == 1:
+        concise_name = crate_name
+      else:
+        concise_name = "%s-%d" % (crate_name, rlibs_seen[crate_name])
+
+      output_filename = f"lib{concise_name}.rlib"
+
+      if rlibs_expected is not None:
+        if concise_name in ignore_rlibs:
+          continue
+        if concise_name not in rlibs_expected:
+          raise Exception("Found stdlib rlib that wasn't expected: %s" % f)
+        rlibs_expected.remove(concise_name)
+
+      infile = os.path.join(rustlib_dir, f)
+      outfile = os.path.join(args.output, output_filename)
+      copy_file(infile, outfile)
+
+    for f in extra_libs:
+      infile = os.path.join(rustlib_dir, f)
+      outfile = os.path.join(args.output, f)
+      copy_file(infile, outfile)
+
+    depfile.write("\n")
+  if rlibs_expected:
+    raise Exception("We failed to find all expected stdlib rlibs: %s" %
+                    ','.join(rlibs_expected))
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/rust/std/gnrt_config.toml b/build/rust/std/gnrt_config.toml
new file mode 100644
index 000000000000..6caab33cb865
--- /dev/null
+++ b/build/rust/std/gnrt_config.toml
@@ -0,0 +1,60 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Provides per-crate and overall configuration options to gnrt.
+
+[all]
+# force-unstable-if-unmarked prevents normal crates from inadvertently using
+# symbols from std-internal dependencies in the sysroot. This is normally
+# passed during an x.py build, but we have to do it manually.
+rustflags = ['-Zforce-unstable-if-unmarked']
+
+# Override the GN output dir. We direct std targets to output directly to the
+# sysroot we'll use later. This must stay in sync with `local_rustc_sysroot` in
+# //build/rust/std/BUILD.gn
+output_dir = '$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/'
+
+# This target provides setup needed for building std.
+extra_gn_deps = ['//build/rust/std:std_build_deps']
+
+[crate.libc]
+# Many flags are set by libc's build.rs based on a new enough rustc, but we
+# don't run build scripts for std crates.
+# See https://github.com/rust-lang/libc/blob/master/build.rs
+cfg = ['libc_priv_mod_use', 'libc_union', 'libc_const_size_of', 'libc_align',
+'libc_int128', 'libc_core_cvoid', 'libc_packedN', 'libc_cfg_target_vendor',
+'libc_non_exhaustive', 'libc_long_array', 'libc_ptr_addr_of',
+'libc_underscore_const_names', 'libc_const_extern_fn'
+]
+
+[crate.std]
+# Requires:
+# * cfg(backtrace_in_libstd) because it directly includes .rs files from the
+#   backtrace code rather than including it as a dependency. backtrace's
+#   implementation has special-purpose code to handle this.
+# * STD_ENV_ARCH is referenced in architecture-dependent code. Note this is the
+#   target arch, and as such `$rust_target_arch` is passed literally to GN.
+#   This variable is set at build time in build/config/rust.gni
+#
+# See https://github.com/rust-lang/rust/blob/master/library/std/build.rs
+cfg = ['backtrace_in_libstd']
+env = ['STD_ENV_ARCH=$rust_target_arch']
+
+[crate.test]
+# Requires:
+# * CFG_DISABLE_UNSTABLE_FEATURES=0 to match how it's built by x.py.
+env = ['CFG_DISABLE_UNSTABLE_FEATURES=0']
+
+# test only depends on proc_macro as an internal detail of the Rust build, so
+# it's implicitly included with std/test. However, we list the std crates and
+# construct the sysroot explicitly. We don't need this, and we don't even want
+# it during cross-compiles (since we will only build host proc_macro crates).
+exclude_deps_in_gn = ['proc_macro']
+
+[crate.unwind]
+# The unwind crate has #[link] directives to the native unwind library, but we
+# either get that automatically by linking with clang++, or build and link the
+# lib from //buildtools/third_party/libunwind explicitly. Disable -lfoo linker
+# flags from this crate.
+rustflags = ['-Zlink-directives=false']
diff --git a/build/rust/std/immediate_crash.h b/build/rust/std/immediate_crash.h
new file mode 100644
index 000000000000..bc273502c0b1
--- /dev/null
+++ b/build/rust/std/immediate_crash.h
@@ -0,0 +1,170 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is copied from //base/immediate_crash.h.
+
+#ifndef BUILD_RUST_STD_IMMEDIATE_CRASH_H_
+#define BUILD_RUST_STD_IMMEDIATE_CRASH_H_
+
+#include "build/build_config.h"
+
+// Crashes in the fastest possible way with no attempt at logging.
+// There are several constraints; see http://crbug.com/664209 for more context.
+//
+// - TRAP_SEQUENCE_() must be fatal. It should not be possible to ignore the
+//   resulting exception or simply hit 'continue' to skip over it in a
+//   debugger.
+// - Different instances of TRAP_SEQUENCE_() must not be folded together, to
+//   ensure crash reports are debuggable. Unlike __builtin_trap(), asm volatile
+//   blocks will not be folded together.
+//   Note: TRAP_SEQUENCE_() previously required an instruction with a unique
+//   nonce since unlike clang, GCC folds together identical asm volatile
+//   blocks.
+// - TRAP_SEQUENCE_() must produce a signal that is distinct from an invalid
+//   memory access.
+// - TRAP_SEQUENCE_() must be treated as a set of noreturn instructions.
+//   __builtin_unreachable() is used to provide that hint here. clang also uses
+//   this as a heuristic to pack the instructions in the function epilogue to
+//   improve code density.
+//
+// Additional properties that are nice to have:
+// - TRAP_SEQUENCE_() should be as compact as possible.
+// - The first instruction of TRAP_SEQUENCE_() should not change, to avoid
+//   shifting crash reporting clusters.
+//   As a consequence of this, explicit assembly is preferred over
+//   intrinsics.
+//   Note: this last bullet point may no longer be true, and may be removed in
+//   the future.
+
+// Note: TRAP_SEQUENCE is currently split into two macro helpers due to the
+// fact that clang emits an actual instruction for __builtin_unreachable() on
+// certain platforms (see https://crbug.com/958675). In addition, the
+// int3/bkpt/brk will be removed in followups, so splitting it up like this now
+// makes it easy to land the followups.
+
+#if defined(COMPILER_GCC)
+
+#if BUILDFLAG(IS_NACL)
+
+// Crash report accuracy is not guaranteed on NaCl.
+#define TRAP_SEQUENCE1_() __builtin_trap()
+#define TRAP_SEQUENCE2_() asm volatile("")
+
+#elif defined(ARCH_CPU_X86_FAMILY)
+
+// TODO(https://crbug.com/958675): In theory, it should be possible to use just
+// int3. However, there are a number of crashes with SIGILL as the exception
+// code, so it seems likely that there's a signal handler that allows execution
+// to continue after SIGTRAP.
+#define TRAP_SEQUENCE1_() asm volatile("int3")
+
+#if BUILDFLAG(IS_APPLE)
+// Intentionally empty: __builtin_unreachable() is always part of the sequence
+// (see IMMEDIATE_CRASH below) and already emits a ud2 on Mac.
+#define TRAP_SEQUENCE2_() asm volatile("")
+#else
+#define TRAP_SEQUENCE2_() asm volatile("ud2")
+#endif  // BUILDFLAG(IS_APPLE)
+
+#elif defined(ARCH_CPU_ARMEL)
+
+// bkpt will generate a SIGBUS when running on armv7 and a SIGTRAP when running
+// as a 32 bit userspace app on arm64. There doesn't seem to be any way to
+// cause a SIGTRAP from userspace without using a syscall (which would be a
+// problem for sandboxing).
+// TODO(https://crbug.com/958675): Remove bkpt from this sequence.
+#define TRAP_SEQUENCE1_() asm volatile("bkpt #0")
+#define TRAP_SEQUENCE2_() asm volatile("udf #0")
+
+#elif defined(ARCH_CPU_ARM64)
+
+// This will always generate a SIGTRAP on arm64.
+// TODO(https://crbug.com/958675): Remove brk from this sequence.
+#define TRAP_SEQUENCE1_() asm volatile("brk #0")
+#define TRAP_SEQUENCE2_() asm volatile("hlt #0")
+
+#else
+
+// Crash report accuracy will not be guaranteed on other architectures, but at
+// least this will crash as expected.
+#define TRAP_SEQUENCE1_() __builtin_trap()
+#define TRAP_SEQUENCE2_() asm volatile("")
+
+#endif  // ARCH_CPU_*
+
+#elif defined(COMPILER_MSVC)
+
+#if !defined(__clang__)
+
+// MSVC x64 doesn't support inline asm, so use the MSVC intrinsic.
+#define TRAP_SEQUENCE1_() __debugbreak()
+#define TRAP_SEQUENCE2_()
+
+#elif defined(ARCH_CPU_ARM64)
+
+// Windows ARM64 uses "BRK #F000" as its breakpoint instruction, and
+// __debugbreak() generates that in both VC++ and clang.
+#define TRAP_SEQUENCE1_() __debugbreak()
+// Intentionally empty: __builtin_unreachable() is always part of the sequence
+// (see IMMEDIATE_CRASH below) and already emits a ud2 on Win64,
+// https://crbug.com/958373
+#define TRAP_SEQUENCE2_() __asm volatile("")
+
+#else
+
+#define TRAP_SEQUENCE1_() asm volatile("int3")
+#define TRAP_SEQUENCE2_() asm volatile("ud2")
+
+#endif  // __clang__
+
+#else
+
+#error No supported trap sequence!
+
+#endif  // COMPILER_GCC
+
+#define TRAP_SEQUENCE_() \
+  do {                   \
+    TRAP_SEQUENCE1_();   \
+    TRAP_SEQUENCE2_();   \
+  } while (false)
+
+// CHECK() and the trap sequence can be invoked from a constexpr function.
+// This could make compilation fail on GCC, as it forbids directly using inline
+// asm inside a constexpr function. However, it allows calling a lambda
+// expression including the same asm.
+// The side effect is that the top of the stacktrace will not point to the
+// calling function, but to this anonymous lambda. This is still useful as the
+// full name of the lambda will typically include the name of the function that
+// calls CHECK() and the debugger will still break at the right line of code.
+#if !defined(COMPILER_GCC) || defined(__clang__)
+
+#define WRAPPED_TRAP_SEQUENCE_() TRAP_SEQUENCE_()
+
+#else
+
+#define WRAPPED_TRAP_SEQUENCE_() \
+  do {                           \
+    [] { TRAP_SEQUENCE_(); }();  \
+  } while (false)
+
+#endif  // !defined(COMPILER_GCC) || defined(__clang__)
+
+#if defined(__clang__) || defined(COMPILER_GCC)
+
+// __builtin_unreachable() hints to the compiler that this is noreturn and can
+// be packed in the function epilogue.
+#define IMMEDIATE_CRASH()     \
+  ({                          \
+    WRAPPED_TRAP_SEQUENCE_(); \
+    __builtin_unreachable();  \
+  })
+
+#else
+
+// This is supporting non-chromium user of logging.h to build with MSVC, like
+// pdfium. On MSVC there is no __builtin_unreachable().
+#define IMMEDIATE_CRASH() WRAPPED_TRAP_SEQUENCE_()
+
+#endif  // defined(__clang__) || defined(COMPILER_GCC)
+
+#endif  // BUILD_RUST_STD_IMMEDIATE_CRASH_H_
diff --git a/build/rust/std/remap_alloc.cc b/build/rust/std/remap_alloc.cc
new file mode 100644
index 000000000000..7f8aa1d7b6f8
--- /dev/null
+++ b/build/rust/std/remap_alloc.cc
@@ -0,0 +1,152 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Header names below were lost in transcription; these four cover everything
+// this file uses (std::min, std::max_align_t, malloc/free/realloc/calloc,
+// memcpy/memset).
+#include <algorithm>
+#include <cstddef>
+#include <cstdlib>
+#include <cstring>
+
+#include "build/build_config.h"
+#include "build/rust/std/immediate_crash.h"
+
+#if BUILDFLAG(IS_ANDROID)
+#include <malloc.h>
+#endif
+
+// When linking a final binary, rustc has to pick between either:
+// * The default Rust allocator
+// * Any #[global_allocator] defined in *any rlib in its dependency tree*
+//   (https://doc.rust-lang.org/edition-guide/rust-2018/platform-and-target-support/global-allocators.html)
+//
+// In this latter case, this fact will be recorded in some of the metadata
+// within the .rlib file. (An .rlib file is just a .a file, but does have
+// additional metadata for use by rustc. This is, as far as I know, the only
+// such metadata we would ideally care about.)
+//
+// In all the linked rlibs,
+// * If 0 crates define a #[global_allocator], rustc uses its default allocator
+// * If 1 crate defines a #[global_allocator], rustc uses that
+// * If >1 crates define a #[global_allocator], rustc bombs out.
+//
+// Because rustc does these checks, it doesn't just have the __rust_alloc
+// symbols defined anywhere (neither in the stdlib nor in any of these
+// crates which have a #[global_allocator] defined.)
+//
+// Instead:
+// Rust's final linking stage invokes dynamic LLVM codegen to create symbols
+// for the basic heap allocation operations. It literally creates a
+// __rust_alloc symbol at link time. Unless any crate has specified a
+// #[global_allocator], it simply calls from __rust_alloc into
+// __rdl_alloc, which is the default Rust allocator. The same applies to a
+// few other symbols.
+//
+// We're not (always) using rustc for final linking. For cases where we're not
+// using rustc as the final linker, we'll define those symbols here instead.
+//
+// The Rust stdlib on Windows uses GetProcessHeap() which will bypass
+// PartitionAlloc, so we do not forward these functions back to the stdlib.
+// Instead, we pass them to PartitionAlloc, while replicating functionality
+// from the unix stdlib to allow them to provide their increased functionality
+// on top of the system functions.
+//
+// In future, we may build a crate with a #[global_allocator] and
+// redirect these symbols back to Rust in order to use that crate instead.
+//
+// Instead of going through system functions like malloc() we may want to call
+// into PA directly if we wished for Rust allocations to be in a different
+// partition, or similar, in the future.
+//
+// They're weak symbols, because this file will sometimes end up in targets
+// which are linked by rustc, and thus we would otherwise get duplicate
+// definitions. The following definitions will therefore only end up being
+// used in targets which are linked by our C++ toolchain.
+
+extern "C" {
+
+#ifdef COMPONENT_BUILD
+#define REMAP_ALLOC_ATTRIBUTES \
+  __attribute__((visibility("default"))) __attribute__((weak))
+#else
+#define REMAP_ALLOC_ATTRIBUTES __attribute__((weak))
+#endif  // COMPONENT_BUILD
+
+void* REMAP_ALLOC_ATTRIBUTES __rust_alloc(size_t size, size_t align) {
+  // This mirrors kMaxSupportedAlignment from
+  // base/allocator/partition_allocator/partition_alloc_constants.h.
+  // PartitionAlloc will crash if given an alignment larger than this.
+  constexpr size_t max_align = (1 << 21) / 2;
+  if (align > max_align) {
+    return nullptr;
+  }
+
+  if (align <= alignof(std::max_align_t)) {
+    return malloc(size);
+  } else {
+    // Note: PartitionAlloc by default will route aligned allocations back to
+    // malloc() (the fast path) if they are for a small enough alignment. So we
+    // just unconditionally use aligned allocation functions here.
+    // https://source.chromium.org/chromium/chromium/src/+/refs/heads/main:base/allocator/partition_allocator/shim/allocator_shim_default_dispatch_to_partition_alloc.cc;l=219-226;drc=31d99ff4aa0cc0b75063325ff243e911516a5a6a
+
+#if defined(COMPILER_MSVC)
+    // Because we use PartitionAlloc() as the allocator, free() is able to find
+    // this allocation, instead of the usual requirement to use
+    // _aligned_free().
+    return _aligned_malloc(size, align);
+#elif BUILDFLAG(IS_ANDROID)
+    // Android has no posix_memalign() exposed:
+    // https://source.chromium.org/chromium/chromium/src/+/main:base/memory/aligned_memory.cc;l=24-30;drc=e4622aaeccea84652488d1822c28c78b7115684f
+    return memalign(align, size);
+#else
+    // The `align` from Rust is always a power of 2:
+    // https://doc.rust-lang.org/std/alloc/struct.Layout.html#method.from_size_align.
+    //
+    // We get here only if align > alignof(max_align_t), which guarantees that
+    // the alignment is both a power of 2 and even, which is required by
+    // posix_memalign().
+    //
+    // The PartitionAlloc impl requires that the alignment is at least the same
+    // as pointer-alignment. std::max_align_t is at least pointer-aligned as
+    // well, so we satisfy that.
+    void* p;
+    auto ret = posix_memalign(&p, align, size);
+    return ret == 0 ? p : nullptr;
+#endif
+  }
+}
+
+void REMAP_ALLOC_ATTRIBUTES __rust_dealloc(void* p, size_t size, size_t align) {
+  free(p);
+}
+
+void* REMAP_ALLOC_ATTRIBUTES __rust_realloc(void* p,
+                                            size_t old_size,
+                                            size_t align,
+                                            size_t new_size) {
+  if (align <= alignof(std::max_align_t)) {
+    return realloc(p, new_size);
+  } else {
+    // Note: __rust_alloc() takes (size, align), in that order.
+    void* out = __rust_alloc(new_size, align);
+    memcpy(out, p, std::min(old_size, new_size));
+    return out;
+  }
+}
+
+void* REMAP_ALLOC_ATTRIBUTES __rust_alloc_zeroed(size_t size, size_t align) {
+  if (align <= alignof(std::max_align_t)) {
+    return calloc(size, 1);
+  } else {
+    void* p = __rust_alloc(size, align);
+    memset(p, 0, size);
+    return p;
+  }
+}
+
+void REMAP_ALLOC_ATTRIBUTES __rust_alloc_error_handler(size_t size,
+                                                       size_t align) {
+  IMMEDIATE_CRASH();
+}
+
+extern const unsigned char REMAP_ALLOC_ATTRIBUTES
+    __rust_alloc_error_handler_should_panic = 0;
+
+}  // extern "C"
diff --git a/build/rust/std/rules/BUILD.gn b/build/rust/std/rules/BUILD.gn
new file mode 100644
index 000000000000..ee52a6c56a14
--- /dev/null
+++ b/build/rust/std/rules/BUILD.gn
@@ -0,0 +1,878 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/rust/cargo_crate.gni")
+
+cargo_crate("addr2line") {
+  crate_type = "rlib"
+  crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/addr2line-0.17.0/src/lib.rs"
+  no_std = true
+
+  # Unit tests skipped. Generate with --with-tests to include them.
+  build_native_rust_unit_tests = false
+  sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/addr2line-0.17.0/src/lib.rs" ]
+  edition = "2015"
+  cargo_pkg_version = "0.17.0"
+  cargo_pkg_name = "addr2line"
+  cargo_pkg_description =
+      "A cross-platform symbolication library written in Rust, using `gimli`"
+  library_configs -= [ "//build/config/compiler:chromium_code" ]
+  library_configs += [ "//build/config/compiler:no_chromium_code" ]
+  executable_configs -= [ "//build/config/compiler:chromium_code" ]
+  executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+  deps = [
+    ":compiler_builtins",
+    ":gimli",
+    ":rustc_std_workspace_alloc",
+    ":rustc_std_workspace_core",
+    "//build/rust/std:std_build_deps",
+  ]
+  aliased_deps = {
+    alloc = ":rustc_std_workspace_alloc__rlib"
+    core = ":rustc_std_workspace_core__rlib"
+  }
+  features = [
+    "alloc",
+    "compiler_builtins",
+    "core",
+    "rustc-dep-of-std",
+  ]
+  rustflags = [ "-Zforce-unstable-if-unmarked" ]
+  output_dir =
+      "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("adler") {
+  crate_type = "rlib"
+  crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/adler-1.0.2/src/lib.rs"
+  no_std = true
+
+  # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/adler-1.0.2/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "1.0.2" + cargo_pkg_authors = "Jonas Schievink " + cargo_pkg_name = "adler" + cargo_pkg_description = + "A simple clean-room implementation of the Adler-32 checksum" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "compiler_builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("alloc") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/alloc/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/alloc/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "alloc" + cargo_pkg_description = "The Rust core allocation and collections library" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":core", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("cfg_if") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/cfg-if-1.0.0/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/cfg-if-1.0.0/src/lib.rs" ] + edition = "2018" + cargo_pkg_version = "1.0.0" + cargo_pkg_authors = "Alex Crichton " + cargo_pkg_name = "cfg-if" + cargo_pkg_description = "A macro to ergonomically define an item depending on a large number of #[cfg] parameters. Structured like an if-else chain, the first matching branch is the item that gets emitted." 
+ library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "compiler_builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("compiler_builtins") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/compiler_builtins-0.1.89/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/compiler_builtins-0.1.89/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "0.1.89" + cargo_pkg_authors = "Jorge Aparicio " + cargo_pkg_name = "compiler_builtins" + cargo_pkg_description = "Compiler intrinsics used by the Rust compiler. Also available for other targets if necessary!" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "compiler-builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("core") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/core/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ + "//third_party/rust-toolchain/lib/rustlib/src/rust/library/core/src/lib.rs", + ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "core" + cargo_pkg_description = "The Rust Core Library" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ "//build/rust/std:std_build_deps" ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("getopts") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/getopts-0.2.21/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/getopts-0.2.21/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "0.2.21" + cargo_pkg_authors = "The Rust Project Developers" + cargo_pkg_name = "getopts" + cargo_pkg_description = "getopts-like option parsing." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":rustc_std_workspace_core", + ":rustc_std_workspace_std", + ":unicode_width", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + std = ":rustc_std_workspace_std__rlib" + } + features = [ + "core", + "rustc-dep-of-std", + "std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("gimli") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/gimli-0.26.2/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/gimli-0.26.2/src/lib.rs" ] + edition = "2018" + cargo_pkg_version = "0.26.2" + cargo_pkg_name = "gimli" + cargo_pkg_description = + "A library for reading and writing the DWARF debugging format." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_alloc", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + alloc = ":rustc_std_workspace_alloc__rlib" + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "alloc", + "compiler_builtins", + "core", + "read", + "read-core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("hashbrown") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/hashbrown-0.12.3/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/hashbrown-0.12.3/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.12.3" + cargo_pkg_authors = "Amanieu d'Antras " + cargo_pkg_name = "hashbrown" + cargo_pkg_description = "A Rust port of Google's SwissTable hash map" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_alloc", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + alloc = ":rustc_std_workspace_alloc__rlib" + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "alloc", + "compiler_builtins", + "core", + "nightly", + "rustc-dep-of-std", + "rustc-internal-api", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("libc") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/libc-0.2.140/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/libc-0.2.140/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "0.2.140" + cargo_pkg_authors = "The Rust Project Developers" + cargo_pkg_name = "libc" + cargo_pkg_description = "Raw FFI bindings to platform libraries like libc." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + features = [ + "align", + "rustc-dep-of-std", + "rustc-std-workspace-core", + ] + rustflags = [ + "--cfg=libc_priv_mod_use", + "--cfg=libc_union", + "--cfg=libc_const_size_of", + "--cfg=libc_align", + "--cfg=libc_int128", + "--cfg=libc_core_cvoid", + "--cfg=libc_packedN", + "--cfg=libc_cfg_target_vendor", + "--cfg=libc_non_exhaustive", + "--cfg=libc_long_array", + "--cfg=libc_ptr_addr_of", + "--cfg=libc_underscore_const_names", + "--cfg=libc_const_extern_fn", + "-Zforce-unstable-if-unmarked", + ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("memchr") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/memchr-2.5.0/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/memchr-2.5.0/src/lib.rs" ] + edition = "2018" + cargo_pkg_version = "2.5.0" + cargo_pkg_authors = "Andrew Gallant , bluss" + cargo_pkg_name = "memchr" + cargo_pkg_description = "Safe interface to memchr." 
+ library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "compiler_builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("miniz_oxide") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/miniz_oxide-0.5.3/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/miniz_oxide-0.5.3/src/lib.rs" ] + edition = "2018" + cargo_pkg_version = "0.5.3" + cargo_pkg_authors = "Frommi , oyvindln " + cargo_pkg_name = "miniz_oxide" + cargo_pkg_description = "DEFLATE compression and decompression library rewritten in Rust based on miniz" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":adler", + ":compiler_builtins", + ":rustc_std_workspace_alloc", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + alloc = ":rustc_std_workspace_alloc__rlib" + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "alloc", + "compiler_builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("object") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/object-0.29.0/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/object-0.29.0/src/lib.rs" ] + edition = "2018" + cargo_pkg_version = "0.29.0" + cargo_pkg_name = "object" + cargo_pkg_description = + "A unified interface for reading and writing object file formats." 
+ library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":memchr", + ":rustc_std_workspace_alloc", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + alloc = ":rustc_std_workspace_alloc__rlib" + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "alloc", + "archive", + "coff", + "compiler_builtins", + "core", + "elf", + "macho", + "pe", + "read_core", + "rustc-dep-of-std", + "unaligned", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("panic_abort") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_abort/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_abort/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "panic_abort" + cargo_pkg_description = "Implementation of Rust panics via process aborts" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":alloc", + ":cfg_if", + ":compiler_builtins", + ":core", + ":libc", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("panic_unwind") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_unwind/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_unwind/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "panic_unwind" + cargo_pkg_description = "Implementation of Rust panics via stack unwinding" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":alloc", + ":cfg_if", + ":compiler_builtins", + ":core", + ":libc", + ":unwind", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("proc_macro") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/proc_macro/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/proc_macro/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "proc_macro" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":core", + ":std", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("profiler_builtins") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/profiler_builtins/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/profiler_builtins/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "profiler_builtins" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":core", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("rustc_demangle") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/rustc-demangle-0.1.21/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/rustc-demangle-0.1.21/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "0.1.21" + cargo_pkg_authors = "Alex Crichton " + cargo_pkg_name = "rustc-demangle" + cargo_pkg_description = "Rust compiler symbol demangling." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "compiler_builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("rustc_std_workspace_alloc") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-alloc/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-alloc/lib.rs" ] + edition = "2021" + cargo_pkg_version = "1.99.0" + cargo_pkg_name = "rustc-std-workspace-alloc" + cargo_pkg_description = "Hack for the compiler's own build system" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":alloc", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("rustc_std_workspace_core") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-core/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-core/lib.rs" ] + edition = "2021" + cargo_pkg_version = "1.99.0" + cargo_pkg_name = "rustc-std-workspace-core" + cargo_pkg_description = "Hack for the compiler's own build system" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":core", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("rustc_std_workspace_std") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-std/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-std/lib.rs" ] + edition = "2021" + cargo_pkg_version = "1.99.0" + cargo_pkg_name = "rustc-std-workspace-std" + cargo_pkg_description = "Hack for the compiler's own build system" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":std", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("std") { + crate_type = "rlib" + crate_root = + "//third_party/rust-toolchain/lib/rustlib/src/rust/library/std/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ + "//third_party/rust-toolchain/lib/rustlib/src/rust/library/std/src/lib.rs", + ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "std" + cargo_pkg_description = "The Rust Standard Library" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":addr2line", + ":alloc", + ":cfg_if", + ":compiler_builtins", + ":core", + ":hashbrown", + ":libc", + ":miniz_oxide", + ":object", + ":panic_abort", + ":panic_unwind", + ":profiler_builtins", + ":rustc_demangle", + ":std_detect", + ":unwind", + "//build/rust/std:std_build_deps", + ] + features = [ + "addr2line", + "backtrace", + "gimli-symbolize", + "miniz_oxide", + "object", + "panic_unwind", + "profiler", + "profiler_builtins", + "std_detect_dlsym_getauxval", + "std_detect_file_io", + ] + rustenv = [ "STD_ENV_ARCH=$rust_target_arch" ] + rustflags = [ + "--cfg=backtrace_in_libstd", + "-Zforce-unstable-if-unmarked", + ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("std_detect") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/stdarch/crates/std_detect/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/stdarch/crates/std_detect/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.1.5" + cargo_pkg_authors = "Alex Crichton , Andrew Gallant , Gonzalo Brito Gadeschi " + cargo_pkg_name = "std_detect" + cargo_pkg_description = + "`std::detect` - Rust's standard library run-time CPU feature detection." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":cfg_if", + ":compiler_builtins", + ":libc", + ":rustc_std_workspace_alloc", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + alloc = ":rustc_std_workspace_alloc__rlib" + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "alloc", + "compiler_builtins", + "core", + "libc", + "rustc-dep-of-std", + "std_detect_dlsym_getauxval", + "std_detect_file_io", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("test") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/test/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ + "//third_party/rust-toolchain/lib/rustlib/src/rust/library/test/src/lib.rs", + ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "test" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":core", + ":getopts", + ":panic_abort", + ":panic_unwind", + ":std", + "//build/rust/std:std_build_deps", + ] + rustenv = [ "CFG_DISABLE_UNSTABLE_FEATURES=0" ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("unicode_width") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/unicode-width-0.1.10/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/unicode-width-0.1.10/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "0.1.10" + cargo_pkg_authors = + "kwantam , Manish Goregaokar " + cargo_pkg_name = "unicode-width" + cargo_pkg_description = "Determine displayed width of `char` and `str` types according to Unicode Standard Annex #11 rules." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_core", + ":rustc_std_workspace_std", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + std = ":rustc_std_workspace_std__rlib" + } + features = [ + "compiler_builtins", + "core", + "rustc-dep-of-std", + "std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("unwind") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/unwind/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/unwind/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "unwind" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":cfg_if", + ":compiler_builtins", + ":core", + ":libc", + "//build/rust/std:std_build_deps", + ] + rustflags = [ + "-Zlink-directives=false", + "-Zforce-unstable-if-unmarked", + ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} diff --git a/build/rust/tests/BUILD.gn b/build/rust/tests/BUILD.gn new file mode 100644 index 000000000000..6224e27b8568 --- /dev/null +++ b/build/rust/tests/BUILD.gn @@ -0,0 +1,97 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/rust.gni") +import("//build/rust/rust_unit_tests_group.gni") + +# Build some minimal binaries to exercise the Rust toolchain +# only if that toolchain is enabled in gn args. +group("tests") { + testonly = true + + deps = [ ":deps" ] + if (can_build_rust_unit_tests) { + deps += [ ":build_rust_tests" ] + } +} + +group("deps") { + testonly = true + deps = [] + + # All the rest require Rust. + if (toolchain_has_rust) { + deps += [ + "bindgen_test", + "test_aliased_deps", + "test_aliased_deps:test_aliased_deps_exe", + "test_bin_crate", + "test_cpp_including_rust", + "test_rlib_crate:target1", + "test_rlib_crate:target2", + "test_rlib_crate:test_rlib_crate_associated_bin", + "test_rust_exe", + "test_rust_multiple_dep_versions_exe", + "test_rust_shared_library", + "test_rust_static_library", + "test_serde_json_lenient", + "test_simple_rust_exe", + + # TODO(https://crbug.com/1329611): Enable the additional target below + # once `rs_bindings_from_cc` is distributed via `gclient sync`. In the + # meantime see the instructions in + # `//build/rust/run_rs_bindings_from_cc.py`. + #"test_rs_bindings_from_cc:test_rs_bindings_from_cc", + ] + if (can_build_rust_unit_tests) { + deps += [ + "bindgen_test:bindgen_test_lib_unittests", + "test_aliased_deps:test_aliased_deps_unittests", + "test_cpp_including_rust:test_cpp_including_rust_unittests", + "test_rlib_crate:target1_test_rlib_crate_v0_2_unittests", + "test_rlib_crate:target2_test_rlib_crate_v0_2_unittests", + "test_rust_exe:test_rust_exe_unittests", + "test_rust_multiple_dep_versions_exe/v1:test_lib_v1_unittests", + "test_rust_multiple_dep_versions_exe/v2:test_lib_v2_unittests", + "test_rust_shared_library:test_rust_shared_library_unittests", + "test_rust_static_library:test_rust_static_library_unittests", + "test_rust_static_library_non_standard_arrangement:foo_tests", + "test_rust_unittests", + + # TODO(https://crbug.com/1329611): Enable the additional target below + # once `rs_bindings_from_cc` is distributed via `gclient sync`. In the + # meantime see the instructions in + # `//build/rust/run_rs_bindings_from_cc.py`. + #"test_rs_bindings_from_cc:test_rs_bindings_from_cc_unittests", + ] + if (current_toolchain == host_toolchain_no_sanitizers) { + # Build these proc macro tests only on toolchains where we'd build the + # proc macro itself. 
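+ # (A proc macro runs inside the compiler while other targets are being + # built, so it is always compiled for the host toolchain, without sanitizer + # instrumentation - hence the host_toolchain_no_sanitizers check above.)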
+ deps += [ "test_proc_macro_crate:test_proc_macro_crate_v0_2_unittests" ] + } + } + + if (local_libstd_supported) { + deps += [ + "test_local_std", + "test_local_std:test_local_std_exe", + ] + if (can_build_rust_unit_tests) { + deps += [ "test_local_std:test_local_std_unittests" ] + } + } + + if (is_win) { + deps += [ "test_control_flow_guard" ] + } + } +} + +if (can_build_rust_unit_tests) { + # Generates a script that will run all the native Rust unit tests, in order + # to have them all part of a single test step on infra bots. + rust_unit_tests_group("build_rust_tests") { + deps = [ ":deps" ] + } +} diff --git a/build/rust/tests/bindgen_test/BUILD.gn b/build/rust/tests/bindgen_test/BUILD.gn new file mode 100644 index 000000000000..ce0fbc43539f --- /dev/null +++ b/build/rust/tests/bindgen_test/BUILD.gn @@ -0,0 +1,48 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_bindgen.gni") +import("//build/rust/rust_executable.gni") +import("//build/rust/rust_static_library.gni") + +source_set("c_lib_headers") { + sources = [ + "lib.h", + "lib2.h", + ] +} + +component("c_lib") { + sources = [ "lib.c" ] + + deps = [ ":c_lib_headers" ] + + defines = [ "COMPONENT_IMPLEMENTATION" ] +} + +rust_bindgen("c_lib_bindgen") { + header = "lib.h" + deps = [ ":c_lib_headers" ] +} + +rust_static_library("bindgen_test_lib") { + allow_unsafe = true + deps = [ + ":c_lib", + ":c_lib_bindgen", + ] + sources = [ "src/lib.rs" ] + build_native_rust_unit_tests = true + crate_root = "src/lib.rs" + + bindgen_output = get_target_outputs(":c_lib_bindgen") + inputs = bindgen_output + rustenv = [ "BINDGEN_RS_FILE=" + rebase_path(bindgen_output[0]) ] +} + +rust_executable("bindgen_test") { + deps = [ ":bindgen_test_lib" ] + sources = [ "main.rs" ] + crate_root = "main.rs" +} diff --git a/build/rust/tests/bindgen_test/lib.c b/build/rust/tests/bindgen_test/lib.c new file mode 100644 index 000000000000..3223772e1387 --- /dev/null +++ b/build/rust/tests/bindgen_test/lib.c @@ -0,0 +1,11 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "build/rust/tests/bindgen_test/lib.h" + +#include + +COMPONENT_EXPORT uint32_t add_two_numbers(uint32_t a, uint32_t b) { + return a + b; +} diff --git a/build/rust/tests/bindgen_test/lib.h b/build/rust/tests/bindgen_test/lib.h new file mode 100644 index 000000000000..a6d686e82707 --- /dev/null +++ b/build/rust/tests/bindgen_test/lib.h @@ -0,0 +1,45 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef BUILD_RUST_TESTS_BINDGEN_TEST_LIB_H_ +#define BUILD_RUST_TESTS_BINDGEN_TEST_LIB_H_ + +#include "build/rust/tests/bindgen_test/lib2.h" + +#include + +// The following is equivalent to //base/base_export.h. 
+ +#if defined(COMPONENT_BUILD) +#if defined(WIN32) + +#if defined(COMPONENT_IMPLEMENTATION) +#define COMPONENT_EXPORT __declspec(dllexport) +#else +#define COMPONENT_EXPORT __declspec(dllimport) +#endif // defined(COMPONENT_IMPLEMENTATION) + +#else // defined(WIN32) +#if defined(COMPONENT_IMPLEMENTATION) +#define COMPONENT_EXPORT __attribute__((visibility("default"))) +#else +#define COMPONENT_EXPORT +#endif // defined(COMPONENT_IMPLEMENTATION) +#endif + +#else // defined(COMPONENT_BUILD) +#define COMPONENT_EXPORT +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +COMPONENT_EXPORT uint32_t add_two_numbers(uint32_t a, uint32_t b); + +#ifdef __cplusplus +} +#endif + +#endif // BUILD_RUST_TESTS_BINDGEN_TEST_LIB_H_ diff --git a/build/rust/tests/bindgen_test/lib2.h b/build/rust/tests/bindgen_test/lib2.h new file mode 100644 index 000000000000..f747a6f53b9f --- /dev/null +++ b/build/rust/tests/bindgen_test/lib2.h @@ -0,0 +1,10 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef BUILD_RUST_TESTS_BINDGEN_TEST_LIB2_H_ +#define BUILD_RUST_TESTS_BINDGEN_TEST_LIB2_H_ + +// This file does nothing, it just tests the include paths when running bindgen. + +#endif // BUILD_RUST_TESTS_BINDGEN_TEST_LIB2_H_ diff --git a/build/rust/tests/bindgen_test/main.rs b/build/rust/tests/bindgen_test/main.rs new file mode 100644 index 000000000000..499d93daf89b --- /dev/null +++ b/build/rust/tests/bindgen_test/main.rs @@ -0,0 +1,9 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +use bindgen_test_lib::add_two_numbers_in_c; + +fn main() { + println!("{} + {} = {}", 3, 7, add_two_numbers_in_c(3, 7)); +} diff --git a/build/rust/tests/bindgen_test/src/lib.rs b/build/rust/tests/bindgen_test/src/lib.rs new file mode 100644 index 000000000000..c8672e06aab7 --- /dev/null +++ b/build/rust/tests/bindgen_test/src/lib.rs @@ -0,0 +1,25 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +mod c_ffi { + #![allow(dead_code)] + #![allow(non_snake_case)] + #![allow(non_camel_case_types)] + #![allow(non_upper_case_globals)] + include!(env!("BINDGEN_RS_FILE")); +} + +pub fn add_two_numbers_in_c(a: u32, b: u32) -> u32 { + unsafe { c_ffi::add_two_numbers(a, b) } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_add_two_numbers() { + assert_eq!(add_two_numbers_in_c(5, 10), 15); + } +} diff --git a/build/rust/tests/test_aliased_deps/BUILD.gn b/build/rust/tests/test_aliased_deps/BUILD.gn new file mode 100644 index 000000000000..45ad73f44d3a --- /dev/null +++ b/build/rust/tests/test_aliased_deps/BUILD.gn @@ -0,0 +1,30 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/rust.gni") +import("//build/rust/rust_executable.gni") +import("//build/rust/rust_static_library.gni") + +rust_executable("test_aliased_deps_exe") { + crate_root = "main.rs" + sources = [ crate_root ] + deps = [ ":test_aliased_deps" ] +} + +rust_static_library("test_aliased_deps") { + crate_root = "lib.rs" + sources = [ crate_root ] + deps = [ ":real_name" ] + aliased_deps = { + # Unfortunately we have to know the `__rlib` suffix which is attached to the + # actual rlib in `rust_static_library()`. 
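+ # The alias maps the crate name used in Rust source to the real dependency: + # lib.rs below can write `pub use other_name;` even though the crate is + # actually built from the `real_name` target.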
+ other_name = ":real_name__rlib" + } + build_native_rust_unit_tests = true +} + +rust_static_library("real_name") { + crate_root = "real_name.rs" + sources = [ crate_root ] +} diff --git a/build/rust/tests/test_aliased_deps/lib.rs b/build/rust/tests/test_aliased_deps/lib.rs new file mode 100644 index 000000000000..dcaa3431c52e --- /dev/null +++ b/build/rust/tests/test_aliased_deps/lib.rs @@ -0,0 +1,11 @@ +// Copyright 2023 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub use other_name; + +#[cfg(test)] +#[test] +fn test_add_from_renamed_dep() { + assert_eq!(other_name::add(2, 3), 5); +} diff --git a/build/rust/tests/test_aliased_deps/main.rs b/build/rust/tests/test_aliased_deps/main.rs new file mode 100644 index 000000000000..8f33abecefd8 --- /dev/null +++ b/build/rust/tests/test_aliased_deps/main.rs @@ -0,0 +1,7 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +fn main() { + test_aliased_deps::other_name::hello_world(); +} diff --git a/build/rust/tests/test_aliased_deps/real_name.rs b/build/rust/tests/test_aliased_deps/real_name.rs new file mode 100644 index 000000000000..15f084f3f412 --- /dev/null +++ b/build/rust/tests/test_aliased_deps/real_name.rs @@ -0,0 +1,11 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub fn hello_world() { + println!("hello world"); +} + +pub fn add(a: u32, b: u32) -> u32 { + a + b +} diff --git a/build/rust/tests/test_bin_crate/BUILD.gn b/build/rust/tests/test_bin_crate/BUILD.gn new file mode 100644 index 000000000000..ac47ee0a4cd7 --- /dev/null +++ b/build/rust/tests/test_bin_crate/BUILD.gn @@ -0,0 +1,15 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/cargo_crate.gni") + +cargo_crate("test_bin_crate") { + crate_type = "bin" + crate_root = "crate/src/main.rs" + sources = [ "crate/src/main.rs" ] + build_sources = [ "crate/build.rs" ] + build_root = "crate/build.rs" + + rustenv = [ "BUILD_SCRIPT_TEST_VARIABLE=123" ] +} diff --git a/build/rust/tests/test_bin_crate/crate/build.rs b/build/rust/tests/test_bin_crate/crate/build.rs new file mode 100644 index 000000000000..a1051eb6cd1a --- /dev/null +++ b/build/rust/tests/test_bin_crate/crate/build.rs @@ -0,0 +1,62 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +use std::env; +use std::process::Command; +use std::str::{self, FromStr}; + +fn main() { + println!("cargo:rustc-cfg=build_script_ran"); + let minor = match rustc_minor_version() { + Some(minor) => minor, + None => return, + }; + + let target = env::var("TARGET").unwrap(); + + if minor >= 34 { + println!("cargo:rustc-cfg=is_new_rustc"); + } else { + println!("cargo:rustc-cfg=is_old_rustc"); + } + + if target.contains("android") { + println!("cargo:rustc-cfg=is_android"); + } + if target.contains("darwin") { + println!("cargo:rustc-cfg=is_mac"); + } + + // Check that we can get a `rustenv` variable from the build script. 
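+ // env!() expands at compile time, so a `rustenv` value that GN failed to + // plumb through would be a compile error here rather than a runtime one.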
+ let _ = env!("BUILD_SCRIPT_TEST_VARIABLE"); +} + +fn rustc_minor_version() -> Option<u32> { + let rustc = match env::var_os("RUSTC") { + Some(rustc) => rustc, + None => return None, + }; + + let output = match Command::new(rustc).arg("--version").output() { + Ok(output) => output, + Err(_) => return None, + }; + + let version = match str::from_utf8(&output.stdout) { + Ok(version) => version, + Err(_) => return None, + }; + + let mut pieces = version.split('.'); + if pieces.next() != Some("rustc 1") { + return None; + } + + let next = match pieces.next() { + Some(next) => next, + None => return None, + }; + + u32::from_str(next).ok() +} diff --git a/build/rust/tests/test_bin_crate/crate/src/main.rs b/build/rust/tests/test_bin_crate/crate/src/main.rs new file mode 100644 index 000000000000..08fff49b94c3 --- /dev/null +++ b/build/rust/tests/test_bin_crate/crate/src/main.rs @@ -0,0 +1,15 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +fn main() { + println!("Hello, world!"); + #[cfg(is_new_rustc)] + println!("Is new rustc!"); + #[cfg(is_old_rustc)] + println!("Is old rustc!"); + #[cfg(is_android)] + println!("Is android!"); + #[cfg(is_mac)] + println!("Is darwin!"); +} diff --git a/build/rust/tests/test_control_flow_guard/BUILD.gn b/build/rust/tests/test_control_flow_guard/BUILD.gn new file mode 100644 index 000000000000..202c5b0f0f3c --- /dev/null +++ b/build/rust/tests/test_control_flow_guard/BUILD.gn @@ -0,0 +1,14 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_executable.gni") + +rust_executable("test_control_flow_guard") { + allow_unsafe = true + crate_root = "test_control_flow_guard.rs" + sources = [ crate_root ] + + # Used as a data dep by base_unittests. + is_data_dep = true +} diff --git a/build/rust/tests/test_control_flow_guard/test_control_flow_guard.rs b/build/rust/tests/test_control_flow_guard/test_control_flow_guard.rs new file mode 100644 index 000000000000..d303d3dc8328 --- /dev/null +++ b/build/rust/tests/test_control_flow_guard/test_control_flow_guard.rs @@ -0,0 +1,43 @@ +// Copyright 2023 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +//! To test that CFG is working, build this executable on Windows and run it +//! as: +//! +//! `out\Release\cdb\cdb.exe -G -g -o .\out\Release\test_control_flow_guard.exe` +//! +//! Which should print: +//! ``` +//! (a2d4.bcd8): Security check failure or stack buffer overrun - code c0000409 +//! (!!! second chance !!!) +//! Subcode: 0xa FAST_FAIL_GUARD_ICALL_CHECK_FAILURE +//! ``` +//! +//! If cdb.exe is not present, first run `ninja -C out\Release cdb\cdb.exe`. + +use std::arch::asm; + +#[cfg(any(target_arch = "x86", target_arch = "x86_64"))] +const NOP_INSTRUCTION_SIZE: usize = 1; +#[cfg(target_arch = "aarch64")] +const NOP_INSTRUCTION_SIZE: usize = 4; + +#[inline(never)] +fn nop_sled() { + unsafe { asm!("nop", "nop", "ret",) } +} + +#[inline(never)] +fn indirect_call(func: fn()) { + func(); +} + +fn main() { + let fptr = + unsafe { std::mem::transmute::<usize, fn()>(nop_sled as usize + NOP_INSTRUCTION_SIZE) }; + // Generates a FAST_FAIL_GUARD_ICALL_CHECK_FAILURE if CFG triggers. + indirect_call(fptr); + // Should only reach here if CFG is disabled.
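+ // (With CFG enabled, Windows fast-fails the process at the indirect call + // above, so the line below never runs.)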
+ eprintln!("failed"); +} diff --git a/build/rust/tests/test_cpp_including_rust/BUILD.gn b/build/rust/tests/test_cpp_including_rust/BUILD.gn new file mode 100644 index 000000000000..2157b79880c3 --- /dev/null +++ b/build/rust/tests/test_cpp_including_rust/BUILD.gn @@ -0,0 +1,23 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//base/allocator/allocator.gni") +import("//testing/test.gni") + +executable("test_cpp_including_rust") { + sources = [ "main.cc" ] + deps = [ "//build/rust/tests/test_rust_static_library" ] +} + +test("test_cpp_including_rust_unittests") { + sources = [ "unittests.cc" ] + deps = [ + "//base", + "//base/allocator:buildflags", + "//base/test:run_all_unittests", + "//build/rust/tests/test_rust_static_library", + "//testing/gmock", + "//testing/gtest", + ] +} diff --git a/build/rust/tests/test_cpp_including_rust/main.cc b/build/rust/tests/test_cpp_including_rust/main.cc new file mode 100644 index 000000000000..d515a3464459 --- /dev/null +++ b/build/rust/tests/test_cpp_including_rust/main.cc @@ -0,0 +1,11 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "build/rust/tests/test_rust_static_library/src/lib.rs.h" + +int main(int argc, char* argv[]) { + say_hello(); + add_two_ints_via_rust(3, 4); + return 0; +} diff --git a/build/rust/tests/test_cpp_including_rust/unittests.cc b/build/rust/tests/test_cpp_including_rust/unittests.cc new file mode 100644 index 000000000000..f3b65ad2f40c --- /dev/null +++ b/build/rust/tests/test_cpp_including_rust/unittests.cc @@ -0,0 +1,31 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include + +#include + +#include "base/allocator/buildflags.h" +#include "base/allocator/partition_allocator/address_pool_manager_bitmap.h" +#include "base/allocator/partition_allocator/partition_address_space.h" +#include "build/build_config.h" +#include "build/buildflag.h" +#include "testing/gtest/include/gtest/gtest.h" + +#include "build/rust/tests/test_rust_static_library/src/lib.rs.h" + +TEST(RustTest, CppCallingIntoRust_BasicFFI) { + EXPECT_EQ(7, add_two_ints_via_rust(3, 4)); +} + +TEST(RustTest, RustComponentUsesPartitionAlloc) { + // Verify that PartitionAlloc is consistently used in C++ and Rust. + auto cpp_allocated_int = std::make_unique(); + SomeStruct* rust_allocated_ptr = allocate_via_rust().into_raw(); + EXPECT_EQ(partition_alloc::IsManagedByPartitionAlloc( + reinterpret_cast(rust_allocated_ptr)), + partition_alloc::IsManagedByPartitionAlloc( + reinterpret_cast(cpp_allocated_int.get()))); + rust::Box::from_raw(rust_allocated_ptr); +} diff --git a/build/rust/tests/test_local_std/BUILD.gn b/build/rust/tests/test_local_std/BUILD.gn new file mode 100644 index 000000000000..499aebdd7dc4 --- /dev/null +++ b/build/rust/tests/test_local_std/BUILD.gn @@ -0,0 +1,23 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/config/rust.gni") +import("//build/rust/rust_executable.gni") +import("//build/rust/rust_static_library.gni") + +assert(local_libstd_supported) + +rust_static_library("test_local_std") { + sources = [ "lib.rs" ] + crate_root = "lib.rs" + build_native_rust_unit_tests = true + use_local_std = true +} + +rust_executable("test_local_std_exe") { + sources = [ "main.rs" ] + crate_root = "main.rs" + deps = [ ":test_local_std" ] + use_local_std = true +} diff --git a/build/rust/tests/test_local_std/lib.rs b/build/rust/tests/test_local_std/lib.rs new file mode 100644 index 000000000000..6328cf415d44 --- /dev/null +++ b/build/rust/tests/test_local_std/lib.rs @@ -0,0 +1,8 @@ +// Copyright 2023 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#[test] +fn test_test() { + assert_eq!(1, 1); +} diff --git a/build/rust/tests/test_local_std/main.rs b/build/rust/tests/test_local_std/main.rs new file mode 100644 index 000000000000..746e0216ed2e --- /dev/null +++ b/build/rust/tests/test_local_std/main.rs @@ -0,0 +1,7 @@ +// Copyright 2023 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +fn main() { + println!("hello world"); +} diff --git a/build/rust/tests/test_proc_macro_crate/BUILD.gn b/build/rust/tests/test_proc_macro_crate/BUILD.gn new file mode 100644 index 000000000000..c9b56a4232f2 --- /dev/null +++ b/build/rust/tests/test_proc_macro_crate/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/cargo_crate.gni") + +cargo_crate("test_proc_macro_crate") { + crate_root = "crate/src/lib.rs" + crate_type = "proc-macro" + sources = [ "crate/src/lib.rs" ] + epoch = "0.2" +} diff --git a/build/rust/tests/test_proc_macro_crate/crate/src/lib.rs b/build/rust/tests/test_proc_macro_crate/crate/src/lib.rs new file mode 100644 index 000000000000..6d4025fd74bf --- /dev/null +++ b/build/rust/tests/test_proc_macro_crate/crate/src/lib.rs @@ -0,0 +1,10 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +use proc_macro::TokenStream; + +#[proc_macro] +pub fn calculate_using_proc_macro(_item: TokenStream) -> TokenStream { + "(15 + 15)".parse().unwrap() +} diff --git a/build/rust/tests/test_rlib_crate/BUILD.gn b/build/rust/tests/test_rlib_crate/BUILD.gn new file mode 100644 index 000000000000..9410316a2da8 --- /dev/null +++ b/build/rust/tests/test_rlib_crate/BUILD.gn @@ -0,0 +1,55 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/cargo_crate.gni") + +cargo_crate("target1") { + crate_name = "test_rlib_crate" + crate_root = "crate/src/lib.rs" + sources = [ "crate/src/lib.rs" ] + build_sources = [ "crate/build.rs" ] + build_root = "crate/build.rs" + build_script_outputs = [ "generated/generated.rs" ] + epoch = "0.2" + features = [ + "my-feature_a", + "my-feature_b", + ] + rustflags = [ + "--cfg", + "test_a_and_b", + ] + rustenv = [ "ENV_VAR_FOR_BUILD_SCRIPT=42" ] +} + +# Test that we can build the same crate in multiple ways under different GN +# rules without conflicts. 
+cargo_crate("target2") { + crate_name = "test_rlib_crate" + crate_root = "crate/src/lib.rs" + sources = [ "crate/src/lib.rs" ] + build_sources = [ "crate/build.rs" ] + build_root = "crate/build.rs" + build_script_outputs = [ "generated/generated.rs" ] + epoch = "0.2" + features = [ "my-feature_a" ] + rustenv = [ "ENV_VAR_FOR_BUILD_SCRIPT=42" ] +} + +# Exists to test the case that a single crate has both a library +# and a binary, to ensure that shared build products (e.g. the +# build script) don't conflict. +cargo_crate("test_rlib_crate_associated_bin") { + crate_root = "crate/src/main.rs" + crate_type = "bin" + sources = [ "crate/src/main.rs" ] + build_sources = [ "crate/build.rs" ] + build_root = "crate/build.rs" + features = [ + "my-feature_a", + "my-feature_b", + ] + rustenv = [ "ENV_VAR_FOR_BUILD_SCRIPT=42" ] + deps = [ ":target1" ] +} diff --git a/build/rust/tests/test_rlib_crate/crate/build.rs b/build/rust/tests/test_rlib_crate/crate/build.rs new file mode 100644 index 000000000000..037e2635bce2 --- /dev/null +++ b/build/rust/tests/test_rlib_crate/crate/build.rs @@ -0,0 +1,90 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +use std::env; +use std::io::Write; +use std::path::Path; +use std::process::Command; +use std::str::{self, FromStr}; + +fn main() { + println!("cargo:rustc-cfg=build_script_ran"); + let minor = match rustc_minor_version() { + Some(minor) => minor, + None => return, + }; + + let target = env::var("TARGET").unwrap(); + + if minor >= 34 { + println!("cargo:rustc-cfg=is_new_rustc"); + } else { + println!("cargo:rustc-cfg=is_old_rustc"); + } + + if target.contains("android") { + println!("cargo:rustc-cfg=is_android"); + } + if target.contains("darwin") { + println!("cargo:rustc-cfg=is_mac"); + } + + let feature_a_enabled = env::var_os("CARGO_FEATURE_MY_FEATURE_A").is_some(); + if feature_a_enabled { + println!("cargo:rustc-cfg=has_feature_a"); + } + let feature_b_enabled = env::var_os("CARGO_FEATURE_MY_FEATURE_B").is_some(); + if feature_b_enabled { + println!("cargo:rustc-cfg=has_feature_b"); + } + + // Some tests as to whether we're properly emulating various cargo features. + assert!(Path::new(&env::var_os("CARGO_MANIFEST_DIR").unwrap()).join("build.rs").exists()); + assert!(Path::new("build.rs").exists()); + assert!(Path::new(&env::var_os("OUT_DIR").unwrap()).exists()); + // Confirm the following env var is set, but do not attempt to validate content + // since the whole point is that it will differ on different platforms. 
+ env::var_os("CARGO_CFG_TARGET_ARCH").unwrap(); + + generate_some_code().unwrap(); +} + +fn generate_some_code() -> std::io::Result<()> { + let output_dir = Path::new(&env::var_os("OUT_DIR").unwrap()).join("generated"); + let _ = std::fs::create_dir_all(&output_dir); + // Test that environment variables from .gn files are passed to build scripts + let preferred_number = env::var("ENV_VAR_FOR_BUILD_SCRIPT").unwrap(); + let mut file = std::fs::File::create(output_dir.join("generated.rs"))?; + write!(file, "fn run_some_generated_code() -> u32 {{ {} }}", preferred_number)?; + Ok(()) +} + +fn rustc_minor_version() -> Option { + let rustc = match env::var_os("RUSTC") { + Some(rustc) => rustc, + None => return None, + }; + + let output = match Command::new(rustc).arg("--version").output() { + Ok(output) => output, + Err(_) => return None, + }; + + let version = match str::from_utf8(&output.stdout) { + Ok(version) => version, + Err(_) => return None, + }; + + let mut pieces = version.split('.'); + if pieces.next() != Some("rustc 1") { + return None; + } + + let next = match pieces.next() { + Some(next) => next, + None => return None, + }; + + u32::from_str(next).ok() +} diff --git a/build/rust/tests/test_rlib_crate/crate/src/lib.rs b/build/rust/tests/test_rlib_crate/crate/src/lib.rs new file mode 100644 index 000000000000..4fb672286310 --- /dev/null +++ b/build/rust/tests/test_rlib_crate/crate/src/lib.rs @@ -0,0 +1,56 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +include!(concat!(env!("OUT_DIR"), "/generated/generated.rs")); + +pub fn say_hello_from_crate() { + assert_eq!(run_some_generated_code(), 42); + #[cfg(is_new_rustc)] + println!("Is new rustc!"); + #[cfg(is_old_rustc)] + println!("Is old rustc!"); + #[cfg(is_android)] + println!("Is android!"); + #[cfg(is_mac)] + println!("Is darwin!"); + #[cfg(has_feature_a)] + println!("Has feature A!"); + #[cfg(not(has_feature_a))] + panic!("Wasn't passed feature a"); + #[cfg(not(has_feature_b))] + #[cfg(test_a_and_b)] + panic!("Wasn't passed feature b"); + #[cfg(has_feature_b)] + #[cfg(not(test_a_and_b))] + panic!("Was passed feature b"); +} + +#[cfg(test)] +mod tests { + /// Test features are passed through from BUILD.gn correctly. This test is + /// the target1 configuration. + #[test] + #[cfg(test_a_and_b)] + fn test_features_passed_target1() { + #[cfg(not(has_feature_a))] + panic!("Wasn't passed feature a"); + #[cfg(not(has_feature_b))] + panic!("Wasn't passed feature b"); + } + + /// This tests the target2 configuration is passed through correctly. + #[test] + #[cfg(not(test_a_and_b))] + fn test_features_passed_target2() { + #[cfg(not(has_feature_a))] + panic!("Wasn't passed feature a"); + #[cfg(has_feature_b)] + panic!("Was passed feature b"); + } + + #[test] + fn test_generated_code_works() { + assert_eq!(crate::run_some_generated_code(), 42); + } +} diff --git a/build/rust/tests/test_rlib_crate/crate/src/main.rs b/build/rust/tests/test_rlib_crate/crate/src/main.rs new file mode 100644 index 000000000000..ba0d6151b6ab --- /dev/null +++ b/build/rust/tests/test_rlib_crate/crate/src/main.rs @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +pub fn main() { + test_rlib_crate::say_hello_from_crate(); +} diff --git a/build/rust/tests/test_rs_bindings_from_cc/BUILD.gn b/build/rust/tests/test_rs_bindings_from_cc/BUILD.gn new file mode 100644 index 000000000000..525faa38d9fd --- /dev/null +++ b/build/rust/tests/test_rs_bindings_from_cc/BUILD.gn @@ -0,0 +1,58 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rs_bindings_from_cc.gni") +import("//build/rust/rust_executable.gni") + +rust_executable("test_rs_bindings_from_cc") { + crate_root = "main.rs" + sources = [ "main.rs" ] + deps = [ + ":self_contained_target_rs_api", + ":target_depending_on_another_rs_api", + "//third_party/crubit:ctor", + ] + build_native_rust_unit_tests = true +} + +rs_bindings_from_cc("self_contained_target_rs_api") { + # This is the target that the bindings are for / call into. + bindings_target = ":self_contained_target" + + # Lists public headers from `sources` of `self_contained_target`. + public_headers = [ + "self_contained_target_header1.h", + "self_contained_target_header2.h", + ] +} + +source_set("self_contained_target") { + sources = [ + "self_contained_target_header1.h", + "self_contained_target_header2.cc", + "self_contained_target_header2.h", + ] +} + +rs_bindings_from_cc("target_depending_on_another_rs_api") { + # This is the target that the bindings are for / call into. + bindings_target = ":target_depending_on_another" + + # Lists public headers from `sources` of `target_depending_on_another`. + # + # TODO(crbug.com/1297592): Is there something we can do (a convention?) to + # avoid this duplication/repetition? + public_headers = [ "target_depending_on_another.h" ] + + # Parallels `public_deps` of `target_depending_on_another` + # + # TODO(crbug.com/1297592): Is there something we can do (a convention?) to + # avoid this duplication/repetition? + deps = [ ":self_contained_target_rs_api" ] +} + +source_set("target_depending_on_another") { + sources = [ "target_depending_on_another.h" ] + public_deps = [ ":self_contained_target" ] +} diff --git a/build/rust/tests/test_rs_bindings_from_cc/main.rs b/build/rust/tests/test_rs_bindings_from_cc/main.rs new file mode 100644 index 000000000000..d20f45c20511 --- /dev/null +++ b/build/rust/tests/test_rs_bindings_from_cc/main.rs @@ -0,0 +1,32 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +fn main() { + println!("Hello world!"); + println!("AddViaCc(100,42) = {}", ::self_contained_target_rs_api::AddViaCc(100, 42)); + println!("MultiplyViaCc(100,42) = {}", ::self_contained_target_rs_api::MultiplyViaCc(100, 42)); +} + +#[cfg(test)] +mod tests { + #[test] + fn test_self_contained_target_function_call_basics() { + assert_eq!(100 + 42, ::self_contained_target_rs_api::AddViaCc(100, 42)); + assert_eq!(100 * 42, ::self_contained_target_rs_api::MultiplyViaCc(100, 42)); + } + + #[test] + fn test_self_contained_target_pod_struct_basics() { + let x = ::self_contained_target_rs_api::CcPodStruct { value: 123 }; + assert_eq!(x.value, 123); + } + + #[test] + fn test_target_depending_on_another() { + ctor::emplace! 
{ + let x = ::target_depending_on_another_rs_api::CreateCcPodStructFromValue(456); + } + assert_eq!(x.value, 456); + } +} diff --git a/build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header1.h b/build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header1.h new file mode 100644 index 000000000000..13da6b111425 --- /dev/null +++ b/build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header1.h @@ -0,0 +1,12 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER1_H_ +#define BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER1_H_ + +inline int MultiplyViaCc(int x, int y) { + return x * y; +} + +#endif // BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER1_H_ diff --git a/build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.cc b/build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.cc new file mode 100644 index 000000000000..002e0a5ab052 --- /dev/null +++ b/build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.cc @@ -0,0 +1,9 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h" + +int AddViaCc(int x, int y) { + return x + y; +} diff --git a/build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h b/build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h new file mode 100644 index 000000000000..fabe75a01eda --- /dev/null +++ b/build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h @@ -0,0 +1,14 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER2_H_ +#define BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER2_H_ + +int AddViaCc(int x, int y); + +struct CcPodStruct final { + int value; +}; + +#endif // BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER2_H_ diff --git a/build/rust/tests/test_rs_bindings_from_cc/target_depending_on_another.h b/build/rust/tests/test_rs_bindings_from_cc/target_depending_on_another.h new file mode 100644 index 000000000000..824282eb41e2 --- /dev/null +++ b/build/rust/tests/test_rs_bindings_from_cc/target_depending_on_another.h @@ -0,0 +1,14 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_TARGET_DEPENDING_ON_ANOTHER_H_ +#define BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_TARGET_DEPENDING_ON_ANOTHER_H_ + +#include "build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h" + +inline CcPodStruct CreateCcPodStructFromValue(int x) { + return CcPodStruct{.value = x}; +} + +#endif // BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_TARGET_DEPENDING_ON_ANOTHER_H_ diff --git a/build/rust/tests/test_rust_exe/BUILD.gn b/build/rust/tests/test_rust_exe/BUILD.gn new file mode 100644 index 000000000000..493854a496db --- /dev/null +++ b/build/rust/tests/test_rust_exe/BUILD.gn @@ -0,0 +1,17 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_executable.gni") + +rust_executable("test_rust_exe") { + crate_root = "main.rs" + sources = [ "main.rs" ] + deps = [ + "//build/rust/tests/test_proc_macro_crate", + "//build/rust/tests/test_rlib_crate:target1", + "//build/rust/tests/test_rust_static_library", + "//build/rust/tests/test_rust_static_library_non_standard_arrangement", + ] + build_native_rust_unit_tests = true +} diff --git a/build/rust/tests/test_rust_exe/main.rs b/build/rust/tests/test_rust_exe/main.rs new file mode 100644 index 000000000000..0409901f0f38 --- /dev/null +++ b/build/rust/tests/test_rust_exe/main.rs @@ -0,0 +1,32 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +use test_rlib_crate::say_hello_from_crate; + +fn main() { + assert_eq!(test_proc_macro_crate::calculate_using_proc_macro!(), 30); + assert_eq!(test_rust_static_library::add_two_ints_via_rust(3, 4), 7); + assert_eq!(test_rust_static_library_non_standard_arrangement::do_subtract(4, 3), 1); + say_hello_from_crate(); +} + +/// These tests are largely all to just test different permutations of builds, +/// e.g. calling into mixed_static_librarys, crates, proc macros, etc. +#[cfg(test)] +mod tests { + #[test] + fn test_call_to_rust() { + assert_eq!(test_rust_static_library::add_two_ints_via_rust(3, 4), 7); + } + + #[test] + fn test_call_to_rust_non_standard_arrangement() { + assert_eq!(test_rust_static_library_non_standard_arrangement::do_subtract(8, 4), 4); + } + + #[test] + fn test_proc_macro() { + assert_eq!(test_proc_macro_crate::calculate_using_proc_macro!(), 30) + } +} diff --git a/build/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn b/build/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn new file mode 100644 index 000000000000..c4d4785d09f5 --- /dev/null +++ b/build/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn @@ -0,0 +1,25 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_executable.gni") +import("//build/rust/rust_static_library.gni") + +# The exe depends on lib v1. But it also transitively depends on lib v2. +# The code in the exe should use v1, and the code in the transitive lib should +# use v2.
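+# The two same-named crates can coexist because each test_lib target declares +# a distinct `epoch` (see v1/BUILD.gn and v2/BUILD.gn), which keeps the +# generated GN target and output names from colliding.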
+rust_executable("test_rust_multiple_dep_versions_exe") { + crate_root = "main.rs" + sources = [ "main.rs" ] + deps = [ + ":transitive_v2", + "//build/rust/tests/test_rust_multiple_dep_versions_exe/v1:test_lib", + ] +} + +rust_static_library("transitive_v2") { + crate_root = "transitive_lib.rs" + sources = [ "transitive_lib.rs" ] + deps = + [ "//build/rust/tests/test_rust_multiple_dep_versions_exe/v2:test_lib" ] +} diff --git a/build/rust/tests/test_rust_multiple_dep_versions_exe/main.rs b/build/rust/tests/test_rust_multiple_dep_versions_exe/main.rs new file mode 100644 index 000000000000..e5471db246aa --- /dev/null +++ b/build/rust/tests/test_rust_multiple_dep_versions_exe/main.rs @@ -0,0 +1,8 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +fn main() { + test_lib::say_hello_from_v1(); + transitive_v2::transitively_say_hello(); +} diff --git a/build/rust/tests/test_rust_multiple_dep_versions_exe/transitive_lib.rs b/build/rust/tests/test_rust_multiple_dep_versions_exe/transitive_lib.rs new file mode 100644 index 000000000000..51806d79e7a3 --- /dev/null +++ b/build/rust/tests/test_rust_multiple_dep_versions_exe/transitive_lib.rs @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub fn transitively_say_hello() { + test_lib::say_hello_from_v2(); +} diff --git a/build/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn b/build/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn new file mode 100644 index 000000000000..0704a1659cee --- /dev/null +++ b/build/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/cargo_crate.gni") + +cargo_crate("test_lib") { + # This crate has the same name as v2/test_lib, but a different epoch. The GN + # target for the unit tests should not collide. + epoch = "1" + sources = [ "src/lib.rs" ] +} diff --git a/build/rust/tests/test_rust_multiple_dep_versions_exe/v1/src/lib.rs b/build/rust/tests/test_rust_multiple_dep_versions_exe/v1/src/lib.rs new file mode 100644 index 000000000000..bc95a76a81fc --- /dev/null +++ b/build/rust/tests/test_rust_multiple_dep_versions_exe/v1/src/lib.rs @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub fn say_hello_from_v1() { + println!("Hello, world - from lib version 1"); +} diff --git a/build/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn b/build/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn new file mode 100644 index 000000000000..3fada7b58ddd --- /dev/null +++ b/build/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/cargo_crate.gni") + +cargo_crate("test_lib") { + # This crate has the same name as v1/test_lib, but a different epoch. The GN + # target for the unit tests should not collide. 
+ epoch = "2" + sources = [ "src/lib.rs" ] +} diff --git a/build/rust/tests/test_rust_multiple_dep_versions_exe/v2/src/lib.rs b/build/rust/tests/test_rust_multiple_dep_versions_exe/v2/src/lib.rs new file mode 100644 index 000000000000..b8035a1d76b6 --- /dev/null +++ b/build/rust/tests/test_rust_multiple_dep_versions_exe/v2/src/lib.rs @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub fn say_hello_from_v2() { + println!("Hello, world - from lib version 2"); +} diff --git a/build/rust/tests/test_rust_shared_library/BUILD.gn b/build/rust/tests/test_rust_shared_library/BUILD.gn new file mode 100644 index 000000000000..f2396c7618f4 --- /dev/null +++ b/build/rust/tests/test_rust_shared_library/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_shared_library.gni") + +rust_shared_library("test_rust_shared_library") { + allow_unsafe = true + sources = [ "src/lib.rs" ] + cxx_bindings = [ "src/lib.rs" ] + build_native_rust_unit_tests = true +} diff --git a/build/rust/tests/test_rust_shared_library/src/lib.rs b/build/rust/tests/test_rust_shared_library/src/lib.rs new file mode 100644 index 000000000000..eabfa274af6c --- /dev/null +++ b/build/rust/tests/test_rust_shared_library/src/lib.rs @@ -0,0 +1,41 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Requires this allow since cxx generates unsafe code. +// +// TODO(crbug.com/1422745): patch upstream cxx to generate compatible code. +#[allow(unsafe_op_in_unsafe_fn)] +#[cxx::bridge] +mod ffi { + pub struct SomeStruct { + a: i32, + } + extern "Rust" { + fn say_hello(); + fn allocate_via_rust() -> Box; + fn add_two_ints_via_rust(x: i32, y: i32) -> i32; + } +} + +pub fn say_hello() { + println!( + "Hello, world - from a Rust library. Calculations suggest that 3+4={}", + add_two_ints_via_rust(3, 4) + ); +} + +#[test] +fn test_hello() { + assert_eq!(7, add_two_ints_via_rust(3, 4)); +} + +pub fn add_two_ints_via_rust(x: i32, y: i32) -> i32 { + x + y +} + +// The next function is used from the +// AllocatorTest.RustComponentUsesPartitionAlloc unit test. +pub fn allocate_via_rust() -> Box { + Box::new(ffi::SomeStruct { a: 43 }) +} diff --git a/build/rust/tests/test_rust_static_library/BUILD.gn b/build/rust/tests/test_rust_static_library/BUILD.gn new file mode 100644 index 000000000000..28a48cbc10cb --- /dev/null +++ b/build/rust/tests/test_rust_static_library/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_static_library.gni") + +rust_static_library("test_rust_static_library") { + allow_unsafe = true + sources = [ "src/lib.rs" ] + cxx_bindings = [ "src/lib.rs" ] + build_native_rust_unit_tests = true +} diff --git a/build/rust/tests/test_rust_static_library/src/lib.rs b/build/rust/tests/test_rust_static_library/src/lib.rs new file mode 100644 index 000000000000..1fcabe3f2c9c --- /dev/null +++ b/build/rust/tests/test_rust_static_library/src/lib.rs @@ -0,0 +1,48 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +// Requires this allow since cxx generates unsafe code. +// +// TODO(crbug.com/1422745): patch upstream cxx to generate compatible code. +#[allow(unsafe_op_in_unsafe_fn)] +#[cxx::bridge] +mod ffi { + pub struct SomeStruct { + a: i32, + } + extern "Rust" { + fn say_hello(); + fn allocate_via_rust() -> Box<SomeStruct>; + fn add_two_ints_via_rust(x: i32, y: i32) -> i32; + } +} + +pub fn say_hello() { + println!( + "Hello, world - from a Rust library. Calculations suggest that 3+4={}", + add_two_ints_via_rust(3, 4) + ); +} + +#[test] +fn test_hello() { + assert_eq!(7, add_two_ints_via_rust(3, 4)); +} + +pub fn add_two_ints_via_rust(x: i32, y: i32) -> i32 { + x + y +} + +// The next function is used from the +// AllocatorTest.RustComponentUsesPartitionAlloc unit test. +pub fn allocate_via_rust() -> Box<ffi::SomeStruct> { + Box::new(ffi::SomeStruct { a: 43 }) +} + +mod tests { + #[test] + fn test_in_mod() { + // Always passes; just to see if tests in modules are handled correctly. + } +} diff --git a/build/rust/tests/test_rust_static_library_non_standard_arrangement/BUILD.gn b/build/rust/tests/test_rust_static_library_non_standard_arrangement/BUILD.gn new file mode 100644 index 000000000000..6a85557a6325 --- /dev/null +++ b/build/rust/tests/test_rust_static_library_non_standard_arrangement/BUILD.gn @@ -0,0 +1,15 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_static_library.gni") + +rust_static_library("test_rust_static_library_non_standard_arrangement") { + sources = [ "foo.rs" ] + crate_root = "foo.rs" + unit_test_target = "foo_tests" + + # TODO(danakj): We should write a gtest binary instead of using native rust + # tests outside of a cargo_crate(). + build_native_rust_unit_tests = true +} diff --git a/build/rust/tests/test_rust_static_library_non_standard_arrangement/foo.rs b/build/rust/tests/test_rust_static_library_non_standard_arrangement/foo.rs new file mode 100644 index 000000000000..197333950a30 --- /dev/null +++ b/build/rust/tests/test_rust_static_library_non_standard_arrangement/foo.rs @@ -0,0 +1,12 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub extern "C" fn do_subtract(a: u32, b: u32) -> u32 { + a - b +} + +#[test] +fn test_ok() { + assert_eq!(do_subtract(12, 8), 4) +} diff --git a/build/rust/tests/test_rust_unittests/BUILD.gn b/build/rust/tests/test_rust_unittests/BUILD.gn new file mode 100644 index 000000000000..bdc1f84a69ef --- /dev/null +++ b/build/rust/tests/test_rust_unittests/BUILD.gn @@ -0,0 +1,11 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_unit_test.gni") + +rust_unit_test("test_rust_unittests") { + sources = [ "main.rs" ] + crate_root = "main.rs" + deps = [ "//build/rust/tests/test_rust_static_library" ] +} diff --git a/build/rust/tests/test_rust_unittests/main.rs b/build/rust/tests/test_rust_unittests/main.rs new file mode 100644 index 000000000000..a10b006d1a51 --- /dev/null +++ b/build/rust/tests/test_rust_unittests/main.rs @@ -0,0 +1,20 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file.
+ +#![feature(test)] +extern crate test; + +use test::Bencher; +use test_rust_static_library::add_two_ints_via_rust; + +#[test] +fn test_call_into_mixed_static_library() { + assert_eq!(add_two_ints_via_rust(5, 7), 12) +} + +#[allow(soft_unstable)] +#[bench] +fn test_benchmark(b: &mut Bencher) { + b.iter(|| 2 + 2); +} diff --git a/build/rust/tests/test_serde_json_lenient/BUILD.gn b/build/rust/tests/test_serde_json_lenient/BUILD.gn new file mode 100644 index 000000000000..dbc954e08ec2 --- /dev/null +++ b/build/rust/tests/test_serde_json_lenient/BUILD.gn @@ -0,0 +1,27 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_static_library.gni") +import("//testing/test.gni") + +rust_static_library("test_serde_json_lenient_rs") { + crate_root = "lib.rs" + allow_unsafe = true + sources = [ "lib.rs" ] + cxx_bindings = [ "lib.rs" ] + deps = [ "//third_party/rust/serde_json_lenient/v0_1:lib" ] +} + +# TODO(https://crbug.com/1278030) - convert to a pure- +# Rust unit test when that's supported on all platforms. +test("test_serde_json_lenient") { + sources = [ "unittests.cc" ] + deps = [ + ":test_serde_json_lenient_rs", + "//base", + "//base/test:run_all_unittests", + "//testing/gmock", + "//testing/gtest", + ] +} diff --git a/build/rust/tests/test_serde_json_lenient/lib.rs b/build/rust/tests/test_serde_json_lenient/lib.rs new file mode 100644 index 000000000000..92bfe8594b07 --- /dev/null +++ b/build/rust/tests/test_serde_json_lenient/lib.rs @@ -0,0 +1,29 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Demo library to ensure that serde_json_lenient is working independently of +// its integration with Chromium. + +use serde_json_lenient::{Result, Value}; + +#[cxx::bridge] +mod ffi { + extern "Rust" { + fn serde_works() -> bool; + } +} + +fn serde_works() -> bool { + parses_ok().unwrap_or_default() +} + +fn parses_ok() -> Result<bool> { + let data = r#" + { + "name": "Slartibartfast", + "planets": [ "Magrathea" ] + }"#; + let v: Value = serde_json_lenient::from_str(data)?; + Ok(v["name"] == "Slartibartfast" && v["planets"][0] == "Magrathea") +} diff --git a/build/rust/tests/test_serde_json_lenient/unittests.cc b/build/rust/tests/test_serde_json_lenient/unittests.cc new file mode 100644 index 000000000000..d6d9866a1f6c --- /dev/null +++ b/build/rust/tests/test_serde_json_lenient/unittests.cc @@ -0,0 +1,10 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "build/rust/tests/test_serde_json_lenient/lib.rs.h" +#include "testing/gtest/include/gtest/gtest.h" + +TEST(RustTest, SerdeJsonTest) { + EXPECT_EQ(true, serde_works()); +} diff --git a/build/rust/tests/test_simple_rust_exe/BUILD.gn b/build/rust/tests/test_simple_rust_exe/BUILD.gn new file mode 100644 index 000000000000..a800720fbcb2 --- /dev/null +++ b/build/rust/tests/test_simple_rust_exe/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This should be the simplest possible exe with no dependencies, +# to make it easy to investigate build problems. +# For this reason it uses 'executable' directly rather than +# //build/rust/rust_executable.gni.
+executable("test_simple_rust_exe") { + crate_root = "main.rs" + deps = [ "//build/rust/std:local_std_for_rustc" ] +} diff --git a/build/rust/tests/test_simple_rust_exe/main.rs b/build/rust/tests/test_simple_rust_exe/main.rs new file mode 100644 index 000000000000..e03684fc7f76 --- /dev/null +++ b/build/rust/tests/test_simple_rust_exe/main.rs @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +fn main() { + println!("Hello, world!"); +} diff --git a/build/sanitize-mac-build-log.sed b/build/sanitize-mac-build-log.sed index b4111c7b828a..23c579eb9a7c 100644 --- a/build/sanitize-mac-build-log.sed +++ b/build/sanitize-mac-build-log.sed @@ -1,4 +1,4 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/sanitize-mac-build-log.sh b/build/sanitize-mac-build-log.sh index df5a7af29eb6..8bd56d7d564a 100755 --- a/build/sanitize-mac-build-log.sh +++ b/build/sanitize-mac-build-log.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Copyright (c) 2010 The Chromium Authors. All rights reserved. +# Copyright 2010 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed diff --git a/build/sanitize-win-build-log.sed b/build/sanitize-win-build-log.sed index c18e664c83a0..6077e6c75325 100644 --- a/build/sanitize-win-build-log.sed +++ b/build/sanitize-win-build-log.sed @@ -1,4 +1,4 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/sanitize-win-build-log.sh b/build/sanitize-win-build-log.sh index df5a7af29eb6..8bd56d7d564a 100755 --- a/build/sanitize-win-build-log.sh +++ b/build/sanitize-win-build-log.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Copyright (c) 2010 The Chromium Authors. All rights reserved. +# Copyright 2010 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed diff --git a/build/sanitizers/OWNERS b/build/sanitizers/OWNERS index b893bc80e94b..09f2efa99adb 100644 --- a/build/sanitizers/OWNERS +++ b/build/sanitizers/OWNERS @@ -1,7 +1,5 @@ -ochang@chromium.org eugenis@chromium.org glider@chromium.org -inferno@chromium.org metzman@chromium.org rnk@chromium.org per-file tsan_suppressions.cc=* diff --git a/build/sanitizers/asan_suppressions.cc b/build/sanitizers/asan_suppressions.cc index f3b9459ded0d..9fbcd8f0c465 100644 --- a/build/sanitizers/asan_suppressions.cc +++ b/build/sanitizers/asan_suppressions.cc @@ -1,4 +1,4 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. +// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/build/sanitizers/lsan_suppressions.cc b/build/sanitizers/lsan_suppressions.cc index 9c0fec46618f..82e5df1711b9 100644 --- a/build/sanitizers/lsan_suppressions.cc +++ b/build/sanitizers/lsan_suppressions.cc @@ -1,4 +1,4 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. 
+// Copyright 2015 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -7,6 +7,8 @@ // LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to // http://dev.chromium.org/developers/testing/leaksanitizer for more info. +#include "build/build_config.h" + #if defined(LEAK_SANITIZER) // Please make sure the code below declares a single string variable @@ -28,10 +30,16 @@ char kLSanDefaultSuppressions[] = // suppression works. http://crbug.com/605286 "leak:__strdup\n" - // Leaks in Nvidia's libGL. + // Leaks in GL and Vulkan drivers and system libraries on Linux NVIDIA "leak:libGL.so\n" "leak:libGLX_nvidia.so\n" + "leak:libnvidia-cbl.so\n" + "leak:libnvidia-fatbinaryloader.so\n" "leak:libnvidia-glcore.so\n" + "leak:libnvidia-rtcore.so\n" + "leak:nvidia0\n" + "leak:nvidiactl\n" + "leak:libdbus-1.so\n" // XRandR has several one time leaks. "leak:libxrandr\n" @@ -41,6 +49,7 @@ char kLSanDefaultSuppressions[] = // http://crbug.com/431213, http://crbug.com/416665 "leak:gin/object_template_builder.h\n" + "leak:gin/function_template.h\n" // Leaks in swrast_dri.so. http://crbug.com/540042 "leak:swrast_dri.so\n" @@ -48,6 +57,12 @@ char kLSanDefaultSuppressions[] = // Leak in glibc's gconv caused by fopen(..., "r,ccs=UNICODE") "leak:__gconv_lookup_cache\n" + // Leak in libnssutil. crbug.com/1290634 + "leak:libnssutil3\n" + + // Suppress leaks from unknown third party modules. http://anglebug.com/6937 + "leak:<unknown module>\n" + // ================ Leaks in Chromium code ================ // PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS. // Instead, commits that introduce memory leaks should be reverted. @@ -55,9 +70,6 @@ char kLSanDefaultSuppressions[] = // impossible, i.e. when enabling leak detection for the first time for a // test target with pre-existing leaks. - // https://crbug.com/755670 - "leak:third_party/yasm/\n" - // v8 leaks caused by weak ref not call "leak:blink::DOMWrapperWorld::Create\n" "leak:blink::ScriptState::Create\n" @@ -69,6 +81,27 @@ char kLSanDefaultSuppressions[] = // Suppress leaks in CreateCdmInstance. https://crbug.com/961062 "leak:media::CdmAdapter::CreateCdmInstance\n" +#if BUILDFLAG(IS_CHROMEOS) + // Suppress leak in FileStream. crbug.com/1263374 + "leak:chromeos::PipeReader::StartIO\n" + // Suppress AnimationObserverToHideView leak. crbug.com/1261464 + "leak:ash::ShelfNavigationWidget::UpdateButtonVisibility\n" + // Suppress AnimationSequence leak. crbug.com/1265031 + "leak:ash::LockStateController::StartPostLockAnimation\n" + // Suppress leak in SurfaceDrawContext. crbug.com/1265033 + "leak:skgpu::v1::SurfaceDrawContext::drawGlyphRunList\n" + // Suppress leak in BluetoothServerSocket. crbug.com/1278970 + "leak:nearby::chrome::BluetoothServerSocket::" + "BluetoothServerSocket\n" + // Suppress leak in NearbyConnectionBrokerImpl. crbug.com/1279578 + "leak:ash::secure_channel::NearbyConnectionBrokerImpl\n" + // Suppress leak in NearbyEndpointFinderImpl. crbug.com/1288577 + "leak:ash::secure_channel::NearbyEndpointFinderImpl::~" + "NearbyEndpointFinderImpl\n" + // Suppress leak in DelayedCallbackGroup test. crbug.com/1279563 + "leak:DelayedCallbackGroup_TimeoutAndRun_Test\n" +#endif + // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS. // End of suppressions.
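A note on the mechanism, since the two files meet here: the suppression string above never passes through LSAN_OPTIONS; the sanitizer runtime declares a weak hook function, and a strong, externally visible definition in the binary wins at link time (sanitizer_options.cc in the next diff defines exactly this hook via its SANITIZER_HOOK_ATTRIBUTE macro). A minimal standalone sketch of the pattern, with the attributes written out instead of the macro:

// Sketch only: how a binary hands a baked-in suppression list to LSan.
// The runtime calls __lsan_default_suppressions() at startup if defined.
extern char kLSanDefaultSuppressions[];  // e.g. the list above

extern "C" __attribute__((
    no_sanitize("address", "memory", "thread", "undefined")))
__attribute__((visibility("default"))) __attribute__((used)) const char*
__lsan_default_suppressions() {
  return kLSanDefaultSuppressions;
}

The same weak-hook pattern serves __lsan_default_options(), __asan_default_options(), and the other callbacks defined in sanitizer_options.cc below.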
diff --git a/build/sanitizers/sanitizer_options.cc b/build/sanitizers/sanitizer_options.cc index 0df3e5a7d486..b2ee0204419d 100644 --- a/build/sanitizers/sanitizer_options.cc +++ b/build/sanitizers/sanitizer_options.cc @@ -1,4 +1,4 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. +// Copyright 2014 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // @@ -10,13 +10,6 @@ #if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) || \ defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER) || \ defined(UNDEFINED_SANITIZER) -// Functions returning default options are declared weak in the tools' runtime -// libraries. To make the linker pick the strong replacements for those -// functions from this module, we explicitly force its inclusion by passing -// -Wl,-u_sanitizer_options_link_helper -extern "C" -void _sanitizer_options_link_helper() { } - // The callbacks we define here will be called from the sanitizer runtime, but // aren't referenced from the Chrome executable. We must ensure that those // callbacks are not sanitizer-instrumented, and that they aren't stripped by @@ -26,14 +19,18 @@ void _sanitizer_options_link_helper() { } __attribute__((no_sanitize("address", "memory", "thread", "undefined"))) \ __attribute__((visibility("default"))) \ __attribute__((used)) + +// Functions returning default options are declared weak in the tools' runtime +// libraries. To make the linker pick the strong replacements for those +// functions from this module, we explicitly force its inclusion by passing +// -Wl,-u_sanitizer_options_link_helper +// SANITIZER_HOOK_ATTRIBUTE instead of just `extern "C"` solely to make the +// symbol externally visible, for ToolsSanityTest.LinksSanitizerOptions. +SANITIZER_HOOK_ATTRIBUTE void _sanitizer_options_link_helper() {} #endif #if defined(ADDRESS_SANITIZER) // Default options for AddressSanitizer in various configurations: -// check_printf=1 - check the memory accesses to printf (and other formatted -// output routines) arguments. -// use_sigaltstack=1 - handle signals on an alternate signal stack. Useful -// for stack overflow detection. // strip_path_prefix=/../../ - prefixes up to and including this // substring will be stripped from source file paths in symbolized reports // fast_unwind_on_fatal=1 - use the fast (frame-pointer-based) stack unwinder @@ -44,29 +41,22 @@ void _sanitizer_options_link_helper() { } // symbolize=1 - enable in-process symbolization. // external_symbolizer_path=... 
- provides the path to llvm-symbolizer // relative to the main executable -#if defined(OS_LINUX) || defined(OS_CHROMEOS) +#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_APPLE) const char kAsanDefaultOptions[] = - "check_printf=1 use_sigaltstack=1 strip_path_prefix=/../../ " - "fast_unwind_on_fatal=1 detect_stack_use_after_return=1 " - "symbolize=1 detect_leaks=0 allow_user_segv_handler=1 " + "strip_path_prefix=/../../ fast_unwind_on_fatal=1 " + "detect_stack_use_after_return=1 symbolize=1 detect_leaks=0 " "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/" "bin/llvm-symbolizer"; - -#elif defined(OS_APPLE) +#elif BUILDFLAG(IS_WIN) const char* kAsanDefaultOptions = - "check_printf=1 use_sigaltstack=1 strip_path_prefix=/../../ " - "fast_unwind_on_fatal=1 detect_stack_use_after_return=1 "; - -#elif defined(OS_WIN) -const char* kAsanDefaultOptions = - "check_printf=1 use_sigaltstack=1 strip_path_prefix=\\..\\..\\ " - "fast_unwind_on_fatal=1 detect_stack_use_after_return=1 " - "symbolize=1 external_symbolizer_path=%d/../../third_party/" + "strip_path_prefix=\\..\\..\\ fast_unwind_on_fatal=1 " + "detect_stack_use_after_return=1 symbolize=1 " + "external_symbolizer_path=%d/../../third_party/" "llvm-build/Release+Asserts/bin/llvm-symbolizer.exe"; -#endif // defined(OS_LINUX) || defined(OS_CHROMEOS) +#endif // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_APPLE) -#if defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_APPLE) || \ - defined(OS_WIN) +#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_APPLE) || \ + BUILDFLAG(IS_WIN) // Allow NaCl to override the default asan options. extern const char* kAsanDefaultOptionsNaCl; __attribute__((weak)) const char* kAsanDefaultOptionsNaCl = nullptr; @@ -82,13 +72,12 @@ extern char kASanDefaultSuppressions[]; SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() { return kASanDefaultSuppressions; } -#endif // defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_APPLE) || - // defined(OS_WIN) +#endif // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_APPLE) + // || BUILDFLAG(IS_WIN) #endif // ADDRESS_SANITIZER -#if defined(THREAD_SANITIZER) && (defined(OS_LINUX) || defined(OS_CHROMEOS)) +#if defined(THREAD_SANITIZER) && (BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)) // Default options for ThreadSanitizer in various configurations: -// detect_deadlocks=1 - enable deadlock (lock inversion) detection. // second_deadlock_stack=1 - more verbose deadlock reports. // report_signal_unsafe=0 - do not report async-signal-unsafe functions // called from signal handlers. @@ -102,7 +91,7 @@ SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() { // external_symbolizer_path=...
- provides the path to llvm-symbolizer // relative to the main executable const char kTsanDefaultOptions[] = - "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 " + "second_deadlock_stack=1 report_signal_unsafe=0 " "report_thread_leaks=0 print_suppressions=1 history_size=7 " "strip_path_prefix=/../../ external_symbolizer_path=%d/../../third_party/" "llvm-build/Release+Asserts/bin/llvm-symbolizer"; @@ -117,8 +106,8 @@ SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() { return kTSanDefaultSuppressions; } -#endif // defined(THREAD_SANITIZER) && (defined(OS_LINUX) || - // defined(OS_CHROMEOS)) +#endif // defined(THREAD_SANITIZER) && (BUILDFLAG(IS_LINUX) || + // BUILDFLAG(IS_CHROMEOS)) #if defined(MEMORY_SANITIZER) // Default options for MemorySanitizer: @@ -128,12 +117,8 @@ SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() { // relative to the main executable const char kMsanDefaultOptions[] = "strip_path_prefix=/../../ " - -#if !defined(OS_APPLE) "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/" - "bin/llvm-symbolizer" -#endif -; + "bin/llvm-symbolizer"; SANITIZER_HOOK_ATTRIBUTE const char *__msan_default_options() { return kMsanDefaultOptions; @@ -143,15 +128,22 @@ SANITIZER_HOOK_ATTRIBUTE const char *__msan_default_options() { #if defined(LEAK_SANITIZER) // Default options for LeakSanitizer: -// print_suppressions=1 - print the list of matched suppressions. // strip_path_prefix=/../../ - prefixes up to and including this // substring will be stripped from source file paths in symbolized reports. // external_symbolizer_path=... - provides the path to llvm-symbolizer // relative to the main executable +// use_poisoned=1 - Scan poisoned memory. This is useful for Oilpan (C++ +// garbage collection) which wants to exclude its managed memory from being +// reported as leaks (through root regions) and also temporarily poisons +// memory regions before calling destructors of objects to avoid destructors +// cross-referencing memory in other objects. Main thread termination in +// Blink is not graceful and leak checks may be emitted at any time, which +// means that the garbage collector may be in a state with poisoned memory, +// leading to false-positive reports. const char kLsanDefaultOptions[] = - "print_suppressions=1 strip_path_prefix=/../../ " + "strip_path_prefix=/../../ use_poisoned=1 " -#if !defined(OS_APPLE) +#if !BUILDFLAG(IS_FUCHSIA) "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/" "bin/llvm-symbolizer " #endif @@ -169,12 +161,17 @@ SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() { return kLsanDefaultOptions; } +// TODO(https://fxbug.dev/102967): Remove when Fuchsia supports +// module-name-based and function-name-based suppression. +#if !BUILDFLAG(IS_FUCHSIA) + extern char kLSanDefaultSuppressions[]; SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() { return kLSanDefaultSuppressions; } +#endif // !BUILDFLAG(IS_FUCHSIA) #endif // LEAK_SANITIZER #if defined(UNDEFINED_SANITIZER) @@ -182,12 +179,8 @@ SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() { // print_stacktrace=1 - print the stacktrace when UBSan reports an error. 
const char kUbsanDefaultOptions[] = "print_stacktrace=1 strip_path_prefix=/../../ " - -#if !defined(OS_APPLE) "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/" - "bin/llvm-symbolizer" -#endif - ; + "bin/llvm-symbolizer"; SANITIZER_HOOK_ATTRIBUTE const char* __ubsan_default_options() { return kUbsanDefaultOptions; diff --git a/build/sanitizers/tsan_suppressions.cc b/build/sanitizers/tsan_suppressions.cc index 6704a34cdd47..d90546efca41 100644 --- a/build/sanitizers/tsan_suppressions.cc +++ b/build/sanitizers/tsan_suppressions.cc @@ -1,4 +1,4 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. +// Copyright 2014 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -16,7 +16,7 @@ // for the instructions on writing suppressions. char kTSanDefaultSuppressions[] = // False positives in libdbus.so, libdconfsettings.so, libflashplayer.so, - // libgio.so, libglib.so and libgobject.so. + // libgio.so, libglib.so, libgobject.so, and libfontconfig.so.1. // Since we don't instrument them, we cannot reason about the // synchronization in them. "race:libdbus*.so\n" @@ -25,42 +25,18 @@ char kTSanDefaultSuppressions[] = "race:libgio*.so\n" "race:libglib*.so\n" "race:libgobject*.so\n" + "race:libfontconfig.so.1\n" // Intentional race in ToolsSanityTest.DataRace in base_unittests. "race:base/tools_sanity_unittest.cc\n" - // Data race on WatchdogCounter [test-only]. - "race:base/threading/watchdog_unittest.cc\n" - // Data race caused by swapping out the network change notifier with a mock // [test-only]. http://crbug.com/927330. "race:content/browser/net_info_browsertest.cc\n" - // http://crbug.com/120808 - "race:base/threading/watchdog.cc\n" - - // http://crbug.com/157586 - "race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n" - // http://crbug.com/244856 "race:libpulsecommon*.so\n" - // http://crbug.com/258479 - "race:g_trace_state\n" - - // http://crbug.com/268924 - "race:base::g_power_monitor\n" - "race:base::PowerMonitor::PowerMonitor\n" - "race:base::PowerMonitor::AddObserver\n" - "race:base::PowerMonitor::RemoveObserver\n" - "race:base::PowerMonitor::IsOnBatteryPower\n" - - // http://crbug.com/272095 - "race:base::g_top_manager\n" - - // http://crbug.com/308590 - "race:CustomThreadWatcher::~CustomThreadWatcher\n" - // http://crbug.com/476529 "deadlock:cc::VideoLayerImpl::WillDraw\n" @@ -70,27 +46,12 @@ char kTSanDefaultSuppressions[] = // http://crbug.com/328868 "race:PR_Lock\n" - // http://crbug.com/348984 - "race:sctp_express_handle_sack\n" - "race:system_base_info\n" - // False positive in libc's tzset_internal, http://crbug.com/379738. "race:tzset_internal\n" // http://crbug.com/380554 "deadlock:g_type_add_interface_static\n" - // http:://crbug.com/386385 - "race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n" - - // http://crbug.com/397022 - "deadlock:" - "base::trace_event::TraceEventTestFixture_ThreadOnceBlocking_Test::" - "TestBody\n" - - // http://crbug.com/415472 - "deadlock:base::trace_event::TraceLog::GetCategoryGroupEnabled\n" - // Lock inversion in third party code, won't fix. 
// https://crbug.com/455638 "deadlock:dbus::Bus::ShutdownAndBlock\n" @@ -98,12 +59,6 @@ char kTSanDefaultSuppressions[] = // https://crbug.com/459429 "race:randomnessPid\n" - // http://crbug.com/582274 - "race:usrsctp_close\n" - - // http://crbug.com/633145 - "race:third_party/libjpeg_turbo/simd/jsimd_x86_64.c\n" - // http://crbug.com/691029 "deadlock:libGLX.so*\n" @@ -114,26 +69,26 @@ char kTSanDefaultSuppressions[] = "race:base::i18n::IsRTL\n" "race:base::i18n::SetICUDefaultLocale\n" - // https://crbug.com/794920 - "race:base::debug::SetCrashKeyString\n" - "race:crash_reporter::internal::CrashKeyStringImpl::Set\n" - // http://crbug.com/927330 "race:net::(anonymous namespace)::g_network_change_notifier\n" - // https://crbug.com/965722 - "race:content::(anonymous namespace)::CorruptDBRequestHandler\n" + // Harmless data races, see WTF::StringImpl::Release code comments. + "race:scoped_refptr::AddRef\n" + "race:scoped_refptr::Release\n" - // https://crbug.com/977085 - "race:vp3_update_thread_context\n" + // Harmless data race in ipcz block allocation. See comments in + // ipcz::BlockAllocator::Allocate(). + "race:ipcz::BlockAllocator::Allocate\n" - // Benign data race in libjpeg-turbo, won't fix - // (https://github.com/libjpeg-turbo/libjpeg-turbo/issues/87). - // https://crbug.com/1056011 - "race:third_party/libjpeg_turbo/simd/x86_64/jsimd.c\n" + // https://crbug.com/1405439 + "race:*::perfetto_track_event::internal::g_category_state_storage\n" + "race:perfetto::DataSource*::static_state_\n" + "race:perfetto::*::ResetForTesting\n" - // https://crbug.com/1158622 - "race:absl::synchronization_internal::Waiter::Post\n" + // In V8 each global safepoint might lock isolate mutexes in a different + // order. This is allowed in this context as it is always guarded by a + // single global mutex. + "deadlock:GlobalSafepoint::EnterGlobalSafepointScope\n" // End of suppressions. ; // Please keep this semicolon. diff --git a/build/shim_headers.gni b/build/shim_headers.gni index 0900cba33521..3bef6c0de6ee 100644 --- a/build/shim_headers.gni +++ b/build/shim_headers.gni @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -28,8 +28,10 @@ template("shim_headers") { } args += invoker.headers - outputs = process_file_template(invoker.headers, - "${shim_headers_path}/{{source_file_part}}") + outputs = [] + foreach(h, invoker.headers) { + outputs += [ shim_headers_path + "/" + rebase_path(invoker.root_path,"//") + "/" + h ] + } } group(target_name) { diff --git a/build/skia_gold_common/PRESUBMIT.py b/build/skia_gold_common/PRESUBMIT.py index 41e1bb2f7dee..f3cc772d81cc 100644 --- a/build/skia_gold_common/PRESUBMIT.py +++ b/build/skia_gold_common/PRESUBMIT.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Presubmit script for //build/skia_gold_common/. @@ -7,28 +7,33 @@ for more details on the presubmit API built into depot_tools. 
""" +USE_PYTHON3 = True -def CommonChecks(input_api, output_api): - output = [] +PRESUBMIT_VERSION = '2.0.0' + + +def _GetSkiaGoldEnv(input_api): + """Gets the common environment for running Skia Gold tests.""" build_path = input_api.os_path.join(input_api.PresubmitLocalPath(), '..') skia_gold_env = dict(input_api.environ) skia_gold_env.update({ 'PYTHONPATH': build_path, 'PYTHONDONTWRITEBYTECODE': '1', }) - output.extend( - input_api.canned_checks.RunUnitTestsInDirectory( - input_api, - output_api, - input_api.PresubmitLocalPath(), [r'^.+_unittest\.py$'], - env=skia_gold_env)) - output.extend(input_api.canned_checks.RunPylint(input_api, output_api)) - return output + return skia_gold_env -def CheckChangeOnUpload(input_api, output_api): - return CommonChecks(input_api, output_api) +def CheckSkiaGoldCommonUnittests(input_api, output_api): + """Runs the unittests for the build/skia_gold_common/ directory.""" + return input_api.canned_checks.RunUnitTestsInDirectory( + input_api, + output_api, + input_api.PresubmitLocalPath(), [r'^.+_unittest\.py$'], + env=_GetSkiaGoldEnv(input_api), + run_on_python2=False, + skip_shebang_check=True) -def CheckChangeOnCommit(input_api, output_api): - return CommonChecks(input_api, output_api) +def CheckPylint(input_api, output_api): + """Runs pylint on all directory content and subdirectories.""" + return input_api.canned_checks.RunPylint(input_api, output_api, version='2.7') diff --git a/build/skia_gold_common/__init__.py b/build/skia_gold_common/__init__.py index ae1922e1cc45..7290ec4c7b08 100644 --- a/build/skia_gold_common/__init__.py +++ b/build/skia_gold_common/__init__.py @@ -1,3 +1,3 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/skia_gold_common/output_managerless_skia_gold_session.py b/build/skia_gold_common/output_managerless_skia_gold_session.py index 59e662abbcca..c91222ade8f2 100644 --- a/build/skia_gold_common/output_managerless_skia_gold_session.py +++ b/build/skia_gold_common/output_managerless_skia_gold_session.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Implementation of skia_gold_session.py without output managers. @@ -8,38 +8,34 @@ import os import subprocess -import tempfile +import time +from typing import List, Tuple + +import six from skia_gold_common import skia_gold_session class OutputManagerlessSkiaGoldSession(skia_gold_session.SkiaGoldSession): - def RunComparison( # pylint: disable=too-many-arguments - self, - name, - png_file, - output_manager=True, - inexact_matching_args=None, - use_luci=True, - optional_keys=None): + def RunComparison(self, *args, **kwargs) -> skia_gold_session.StepRetVal: # Passing True for the output manager is a bit of a hack, as we don't # actually need an output manager and just need to get past the truthy # check. 
- return super(OutputManagerlessSkiaGoldSession, self).RunComparison( - name=name, - png_file=png_file, - output_manager=output_manager, - inexact_matching_args=inexact_matching_args, - use_luci=use_luci, - optional_keys=optional_keys) - - def _CreateDiffOutputDir(self): - # We intentionally don't clean this up and don't put it in self._working_dir - # since we need it to stick around after the test completes so the user - # can look at its contents. - return tempfile.mkdtemp() - - def _StoreDiffLinks(self, image_name, _, output_dir): + assert 'output_manager' not in kwargs, 'Cannot specify output_manager' + kwargs['output_manager'] = True + return super().RunComparison(*args, **kwargs) + + def _CreateDiffOutputDir(self, name: str) -> str: + # Do this instead of just making a temporary directory so that it's easier + # for users to look through multiple results. We intentionally do not clean + # this directory up since the user might need to look at it later. + timestamp = int(time.time()) + name = '%s_%d' % (name, timestamp) + filepath = os.path.join(self._local_png_directory, name) + os.makedirs(filepath) + return filepath + + def _StoreDiffLinks(self, image_name: str, _, output_dir: str) -> None: results = self._comparison_results.setdefault(image_name, self.ComparisonResults()) # The directory should contain "input-<hash>.png", "closest-<hash>.png", @@ -54,9 +50,13 @@ def _StoreDiffLinks(self, image_name, _, output_dir): results.local_diff_diff_image = file_url @staticmethod - def _RunCmdForRcAndOutput(cmd): + def _RunCmdForRcAndOutput(cmd: List[str]) -> Tuple[int, str]: try: - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) + output = subprocess.check_output(cmd, + stderr=subprocess.STDOUT).decode('utf-8') return 0, output except subprocess.CalledProcessError as e: - return e.returncode, e.output + output = e.output + if not isinstance(output, six.string_types): + output = output.decode('utf-8') + return e.returncode, output diff --git a/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py b/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py index cdd87d960fd3..11e8763083cc 100755 --- a/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py +++ b/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py @@ -1,16 +1,20 @@ #!/usr/bin/env vpython3 -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file.
#pylint: disable=protected-access import os +import re import sys import tempfile +from typing import Any import unittest -if sys.version_info[0] == 2: +import six + +if six.PY2: import mock else: import unittest.mock as mock @@ -24,20 +28,21 @@ createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs -def assertArgWith(test, arg_list, arg, value): +def assertArgWith(test: unittest.TestCase, arg_list: list, arg: Any, + value: Any) -> None: i = arg_list.index(arg) test.assertEqual(arg_list[i + 1], value) class GpuSkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase): - def setUp(self): + def setUp(self) -> None: self.setUpPyfakefs() self._working_dir = tempfile.mkdtemp() self._json_keys = tempfile.NamedTemporaryFile(delete=False).name @mock.patch.object(omsgs.OutputManagerlessSkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandCommonArgs(self, cmd_mock): + def test_commandCommonArgs(self, cmd_mock: mock.MagicMock) -> None: cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) sgp = skia_gold_properties.SkiaGoldProperties(args) @@ -67,20 +72,48 @@ def test_commandCommonArgs(self, cmd_mock): # directory. self.assertNotIn(self._working_dir, call_args[i + 1]) + @mock.patch.object(omsgs.OutputManagerlessSkiaGoldSession, '_StoreDiffLinks') + @mock.patch.object(omsgs.OutputManagerlessSkiaGoldSession, + '_RunCmdForRcAndOutput') + def test_explicitLocalPngDirectory(self, cmd_mock: mock.MagicMock, _) -> None: + cmd_mock.return_value = (0, '') + if sys.platform == 'win32': + local_png_dir = 'c:\\tmp\\foo' + else: + local_png_dir = '/tmp/foo' + args = createSkiaGoldArgs(git_revision='a', + skia_gold_local_png_write_directory=local_png_dir) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = omsgs.OutputManagerlessSkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + _, _ = session.Diff('name', '', None) + self.assertEqual(cmd_mock.call_count, 1) + if six.PY3: + call_args = cmd_mock.call_args.args[0] + else: + call_args = cmd_mock.call_args[0][0] + self.assertIn('--out-dir', call_args) + output_dir = call_args[call_args.index('--out-dir') + 1] + # Directory should be a subdirectory of the directory we gave and be made + # up of the image name and a timestamp. 
+ parent_dir, sub_dir = output_dir.rsplit(os.sep, 1) + self.assertEqual(parent_dir, local_png_dir) + sub_dir = os.path.normpath(sub_dir) + self.assertIsNotNone(re.match(r'^name_\d+$', sub_dir)) + class OutputManagerlessSkiaGoldSessionStoreDiffLinksTest( fake_filesystem_unittest.TestCase): - def setUp(self): + def setUp(self) -> None: self.setUpPyfakefs() self._working_dir = tempfile.mkdtemp() self._json_keys = tempfile.NamedTemporaryFile(delete=False).name - def test_outputManagerNotNeeded(self): + def test_outputManagerNotNeeded(self) -> None: args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = omsgs.OutputManagerlessSkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, - None) + self._json_keys, '', '') input_filepath = os.path.join(self._working_dir, 'input-inputhash.png') with open(input_filepath, 'w') as f: f.write('') diff --git a/build/skia_gold_common/run_pytype.py b/build/skia_gold_common/run_pytype.py new file mode 100755 index 000000000000..ad1829e66125 --- /dev/null +++ b/build/skia_gold_common/run_pytype.py @@ -0,0 +1,44 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Simple helper script to run pytype on Gold Python code.""" + +import os +import sys + +GOLD_DIR = os.path.abspath(os.path.dirname(__file__)) +CHROMIUM_SRC_DIR = os.path.realpath(os.path.join(GOLD_DIR, '..', '..')) + +sys.path.append(os.path.join(CHROMIUM_SRC_DIR, 'testing')) + +from pytype_common import pytype_runner # pylint: disable=wrong-import-position + +EXTRA_PATHS_COMPONENTS = [ + ('build', ), + ('testing', ), +] +EXTRA_PATHS = [ + os.path.join(CHROMIUM_SRC_DIR, *p) for p in EXTRA_PATHS_COMPONENTS +] +EXTRA_PATHS.append(GOLD_DIR) + +FILES_AND_DIRECTORIES_TO_CHECK = [ + '.', +] +FILES_AND_DIRECTORIES_TO_CHECK = [ + os.path.join(GOLD_DIR, f) for f in FILES_AND_DIRECTORIES_TO_CHECK +] + +TEST_NAME = 'gold_common_pytype' +TEST_LOCATION = '//build/skia_gold_common/run_pytype.py' + + +def main() -> int: + return pytype_runner.run_pytype(TEST_NAME, TEST_LOCATION, + FILES_AND_DIRECTORIES_TO_CHECK, EXTRA_PATHS, + GOLD_DIR) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/skia_gold_common/skia_gold_properties.py b/build/skia_gold_common/skia_gold_properties.py index 6c1d22b6be97..91a24cbcb066 100644 --- a/build/skia_gold_common/skia_gold_properties.py +++ b/build/skia_gold_common/skia_gold_properties.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for storing Skia Gold comparison properties. @@ -9,14 +9,17 @@ * What the continuous integration system is """ +import argparse import logging +import optparse import os -import subprocess -import sys +from typing import Union +ParsedCmdArgs = Union[argparse.Namespace, optparse.Values] -class SkiaGoldProperties(object): - def __init__(self, args): + +class SkiaGoldProperties(): + def __init__(self, args: ParsedCmdArgs): """Abstract class to validate and store properties related to Skia Gold. 
Args: @@ -28,76 +31,90 @@ def __init__(self, args): self._job_id = None self._local_pixel_tests = None self._no_luci_auth = None + self._service_account = None self._bypass_skia_gold_functionality = None self._code_review_system = None self._continuous_integration_system = None + self._local_png_directory = None self._InitializeProperties(args) - def IsTryjobRun(self): + def IsTryjobRun(self) -> bool: return self.issue is not None @property - def continuous_integration_system(self): + def continuous_integration_system(self) -> str: return self._continuous_integration_system or 'buildbucket' @property - def code_review_system(self): + def code_review_system(self) -> str: return self._code_review_system or 'gerrit' @property - def git_revision(self): + def git_revision(self) -> str: return self._GetGitRevision() @property - def issue(self): + def issue(self) -> int: return self._issue @property - def job_id(self): + def job_id(self) -> str: return self._job_id @property - def local_pixel_tests(self): + def local_pixel_tests(self) -> bool: return self._IsLocalRun() @property - def no_luci_auth(self): + def local_png_directory(self) -> str: + return self._local_png_directory + + @property + def no_luci_auth(self) -> bool: return self._no_luci_auth @property - def patchset(self): + def service_account(self) -> str: + return self._service_account + + @property + def patchset(self) -> int: return self._patchset @property - def bypass_skia_gold_functionality(self): + def bypass_skia_gold_functionality(self) -> bool: return self._bypass_skia_gold_functionality @staticmethod - def _GetGitOriginMasterHeadSha1(): + def _GetGitOriginMainHeadSha1() -> str: raise NotImplementedError() - def _GetGitRevision(self): + def _GetGitRevision(self) -> str: if not self._git_revision: # Automated tests should always pass the revision, so assume we're on # a workstation and try to get the local origin/master HEAD. if not self._IsLocalRun(): raise RuntimeError( '--git-revision was not passed when running on a bot') - revision = self._GetGitOriginMasterHeadSha1() + revision = self._GetGitOriginMainHeadSha1() if not revision or len(revision) != 40: raise RuntimeError( '--git-revision not passed and unable to determine from git') self._git_revision = revision return self._git_revision - def _IsLocalRun(self): + def _IsLocalRun(self) -> bool: if self._local_pixel_tests is None: # Look for the presence of the SWARMING_SERVER environment variable as a # heuristic to determine whether we're running on a workstation or a bot. # This should always be set on swarming, but would be strange to be set on # a workstation. - self._local_pixel_tests = 'SWARMING_SERVER' not in os.environ + # However, since Skylab technically isn't swarming, we need to look for + # an alternative environment variable there. 
+ in_swarming = 'SWARMING_SERVER' in os.environ + in_skylab = bool(int(os.environ.get('RUNNING_IN_SKYLAB', '0'))) + self._local_pixel_tests = not (in_swarming or in_skylab) if self._local_pixel_tests: logging.warning( 'Automatically determined that test is running on a workstation') @@ -106,14 +123,47 @@ def _IsLocalRun(self): 'Automatically determined that test is running on a bot') return self._local_pixel_tests - def _InitializeProperties(self, args): + @staticmethod + def AddCommandLineArguments(parser: argparse.ArgumentParser) -> None: + """ Add command line arguments to an ArgumentParser instance + + Args: + parser: ArgumentParser instance + + Returns: + None + """ + parser.add_argument('--git-revision', type=str, help='Git revision') + parser.add_argument('--gerrit-issue', type=int, help='Gerrit issue number') + parser.add_argument('--gerrit-patchset', + type=int, + help='Gerrit patchset number') + parser.add_argument('--buildbucket-id', + type=int, + help='Buildbucket ID of builder') + parser.add_argument('--code-review-system', + type=str, + help='Code review system') + parser.add_argument('--continuous-integration-system', + type=str, + help='Continuous integration system') + + def _InitializeProperties(self, args: ParsedCmdArgs) -> None: if hasattr(args, 'local_pixel_tests'): # If not set, will be automatically determined later if needed. self._local_pixel_tests = args.local_pixel_tests + if hasattr(args, 'skia_gold_local_png_write_directory'): + self._local_png_directory = args.skia_gold_local_png_write_directory + if hasattr(args, 'no_luci_auth'): self._no_luci_auth = args.no_luci_auth + if hasattr(args, 'service_account'): + self._service_account = args.service_account + if self._service_account: + self._no_luci_auth = True + if hasattr(args, 'bypass_skia_gold_functionality'): self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality diff --git a/build/skia_gold_common/skia_gold_properties_unittest.py b/build/skia_gold_common/skia_gold_properties_unittest.py index ddbac3277d49..e333e3396858 100755 --- a/build/skia_gold_common/skia_gold_properties_unittest.py +++ b/build/skia_gold_common/skia_gold_properties_unittest.py @@ -1,5 +1,5 @@ #!/usr/bin/env vpython3 -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -23,7 +23,9 @@ class SkiaGoldPropertiesInitializationTest(unittest.TestCase): """Tests that SkiaGoldProperties initializes (or doesn't) when expected.""" - def verifySkiaGoldProperties(self, instance, expected): + def verifySkiaGoldProperties( + self, instance: skia_gold_properties.SkiaGoldProperties, + expected: dict) -> None: self.assertEqual(instance._local_pixel_tests, expected.get('local_pixel_tests')) self.assertEqual(instance._no_luci_auth, expected.get('no_luci_auth')) @@ -38,54 +40,63 @@ def verifySkiaGoldProperties(self, instance, expected): self.assertEqual(instance._bypass_skia_gold_functionality, expected.get('bypass_skia_gold_functionality')) - def test_initializeSkiaGoldAttributes_unsetLocal(self): + def test_initializeSkiaGoldAttributes_unsetLocal(self) -> None: args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) self.verifySkiaGoldProperties(sgp, {}) - def test_initializeSkiaGoldAttributes_explicitLocal(self): + def test_initializeSkiaGoldAttributes_explicitLocal(self) -> None: args = createSkiaGoldArgs(local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': True}) - def test_initializeSkiaGoldAttributes_explicitNonLocal(self): + def test_initializeSkiaGoldAttributes_explicitNonLocal(self) -> None: args = createSkiaGoldArgs(local_pixel_tests=False) sgp = skia_gold_properties.SkiaGoldProperties(args) self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': False}) - def test_initializeSkiaGoldAttributes_explicitNoLuciAuth(self): + def test_initializeSkiaGoldAttributes_explicitNoLuciAuth(self) -> None: args = createSkiaGoldArgs(no_luci_auth=True) sgp = skia_gold_properties.SkiaGoldProperties(args) self.verifySkiaGoldProperties(sgp, {'no_luci_auth': True}) - def test_initializeSkiaGoldAttributes_explicitCrs(self): + def test_initializeSkiaGoldAttributes_explicitServiceAccount(self) -> None: + args = createSkiaGoldArgs(service_account='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, { + 'service_account': 'a', + 'no_luci_auth': True + }) + + def test_initializeSkiaGoldAttributes_explicitCrs(self) -> None: args = createSkiaGoldArgs(code_review_system='foo') sgp = skia_gold_properties.SkiaGoldProperties(args) self.verifySkiaGoldProperties(sgp, {'code_review_system': 'foo'}) - def test_initializeSkiaGoldAttributes_explicitCis(self): + def test_initializeSkiaGoldAttributes_explicitCis(self) -> None: args = createSkiaGoldArgs(continuous_integration_system='foo') sgp = skia_gold_properties.SkiaGoldProperties(args) self.verifySkiaGoldProperties(sgp, {'continuous_integration_system': 'foo'}) - def test_initializeSkiaGoldAttributes_bypassExplicitTrue(self): + def test_initializeSkiaGoldAttributes_bypassExplicitTrue(self) -> None: args = createSkiaGoldArgs(bypass_skia_gold_functionality=True) sgp = skia_gold_properties.SkiaGoldProperties(args) self.verifySkiaGoldProperties(sgp, {'bypass_skia_gold_functionality': True}) - def test_initializeSkiaGoldAttributes_explicitGitRevision(self): + def test_initializeSkiaGoldAttributes_explicitGitRevision(self) -> None: args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) self.verifySkiaGoldProperties(sgp, {'git_revision': 'a'}) - def test_initializeSkiaGoldAttributes_tryjobArgsIgnoredWithoutRevision(self): + def test_initializeSkiaGoldAttributes_tryjobArgsIgnoredWithoutRevision( + self) -> None: args = createSkiaGoldArgs(gerrit_issue=1, 
gerrit_patchset=2, buildbucket_id=3) sgp = skia_gold_properties.SkiaGoldProperties(args) self.verifySkiaGoldProperties(sgp, {}) - def test_initializeSkiaGoldAttributes_tryjobArgs(self): + def test_initializeSkiaGoldAttributes_tryjobArgs(self) -> None: args = createSkiaGoldArgs(git_revision='a', gerrit_issue=1, gerrit_patchset=2, @@ -99,14 +110,14 @@ def test_initializeSkiaGoldAttributes_tryjobArgs(self): 'buildbucket_id': 3 }) - def test_initializeSkiaGoldAttributes_tryjobMissingPatchset(self): + def test_initializeSkiaGoldAttributes_tryjobMissingPatchset(self) -> None: args = createSkiaGoldArgs(git_revision='a', gerrit_issue=1, buildbucket_id=3) with self.assertRaises(RuntimeError): skia_gold_properties.SkiaGoldProperties(args) - def test_initializeSkiaGoldAttributes_tryjobMissingBuildbucket(self): + def test_initializeSkiaGoldAttributes_tryjobMissingBuildbucket(self) -> None: args = createSkiaGoldArgs(git_revision='a', gerrit_issue=1, gerrit_patchset=2) @@ -117,24 +128,28 @@ def test_initializeSkiaGoldAttributes_tryjobMissingBuildbucket(self): class SkiaGoldPropertiesCalculationTest(unittest.TestCase): """Tests that SkiaGoldProperties properly calculates certain properties.""" - def testLocalPixelTests_determineTrue(self): + def testLocalPixelTests_determineTrue(self) -> None: args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) with mock.patch.dict(os.environ, {}, clear=True): self.assertTrue(sgp.local_pixel_tests) + with mock.patch.dict(os.environ, {'RUNNING_IN_SKYLAB': '0'}, clear=True): + self.assertTrue(sgp.local_pixel_tests) - def testLocalPixelTests_determineFalse(self): + def testLocalPixelTests_determineFalse(self) -> None: args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) with mock.patch.dict(os.environ, {'SWARMING_SERVER': ''}, clear=True): self.assertFalse(sgp.local_pixel_tests) + with mock.patch.dict(os.environ, {'RUNNING_IN_SKYLAB': '1'}, clear=True): + self.assertFalse(sgp.local_pixel_tests) - def testIsTryjobRun_noIssue(self): + def testIsTryjobRun_noIssue(self) -> None: args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) self.assertFalse(sgp.IsTryjobRun()) - def testIsTryjobRun_issue(self): + def testIsTryjobRun_issue(self) -> None: args = createSkiaGoldArgs(git_revision='a', gerrit_issue=1, gerrit_patchset=2, @@ -142,42 +157,42 @@ def testIsTryjobRun_issue(self): sgp = skia_gold_properties.SkiaGoldProperties(args) self.assertTrue(sgp.IsTryjobRun()) - def testGetGitRevision_revisionSet(self): + def testGetGitRevision_revisionSet(self) -> None: args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) self.assertEqual(sgp.git_revision, 'a') - def testGetGitRevision_findValidRevision(self): + def testGetGitRevision_findValidRevision(self) -> None: args = createSkiaGoldArgs(local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) with mock.patch.object(skia_gold_properties.SkiaGoldProperties, - '_GetGitOriginMasterHeadSha1') as patched_head: + '_GetGitOriginMainHeadSha1') as patched_head: expected = 'a' * 40 patched_head.return_value = expected self.assertEqual(sgp.git_revision, expected) # Should be cached. 
self.assertEqual(sgp._git_revision, expected) - def testGetGitRevision_noExplicitOnBot(self): + def testGetGitRevision_noExplicitOnBot(self) -> None: args = createSkiaGoldArgs(local_pixel_tests=False) sgp = skia_gold_properties.SkiaGoldProperties(args) with self.assertRaises(RuntimeError): _ = sgp.git_revision - def testGetGitRevision_findEmptyRevision(self): + def testGetGitRevision_findEmptyRevision(self) -> None: args = createSkiaGoldArgs(local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) with mock.patch.object(skia_gold_properties.SkiaGoldProperties, - '_GetGitOriginMasterHeadSha1') as patched_head: + '_GetGitOriginMainHeadSha1') as patched_head: patched_head.return_value = '' with self.assertRaises(RuntimeError): _ = sgp.git_revision - def testGetGitRevision_findMalformedRevision(self): + def testGetGitRevision_findMalformedRevision(self) -> None: args = createSkiaGoldArgs(local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) with mock.patch.object(skia_gold_properties.SkiaGoldProperties, - '_GetGitOriginMasterHeadSha1') as patched_head: + '_GetGitOriginMainHeadSha1') as patched_head: patched_head.return_value = 'a' * 39 with self.assertRaises(RuntimeError): _ = sgp.git_revision diff --git a/build/skia_gold_common/skia_gold_session.py b/build/skia_gold_common/skia_gold_session.py index 7df09174dcb0..b0b54a2b3ee3 100644 --- a/build/skia_gold_common/skia_gold_session.py +++ b/build/skia_gold_common/skia_gold_session.py @@ -1,14 +1,18 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for interacting with the Skia Gold image diffing service.""" import logging import os +import platform import shutil -import subprocess import sys import tempfile +import time +from typing import Any, Dict, List, Optional, Tuple + +from skia_gold_common import skia_gold_properties CHROMIUM_SRC = os.path.realpath( os.path.join(os.path.dirname(__file__), '..', '..')) @@ -17,13 +21,20 @@ if sys.platform == 'win32': GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'win', 'goldctl') + '.exe' elif sys.platform == 'darwin': - GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'mac', 'goldctl') + machine = platform.machine().lower() + if any(machine.startswith(m) for m in ('arm64', 'aarch64')): + GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'mac_arm64', 'goldctl') + else: + GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'mac_amd64', 'goldctl') else: GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'linux', 'goldctl') -class SkiaGoldSession(object): - class StatusCodes(object): +StepRetVal = Tuple[int, Optional[str]] + + +class SkiaGoldSession(): + class StatusCodes(): """Status codes for RunComparison.""" SUCCESS = 0 AUTH_FAILURE = 1 @@ -33,24 +44,24 @@ class StatusCodes(object): LOCAL_DIFF_FAILURE = 5 NO_OUTPUT_MANAGER = 6 - class ComparisonResults(object): + class ComparisonResults(): """Struct-like object for storing results of an image comparison.""" def __init__(self): - self.public_triage_link = None - self.internal_triage_link = None - self.triage_link_omission_reason = None - self.local_diff_given_image = None - self.local_diff_closest_image = None - self.local_diff_diff_image = None + self.public_triage_link: Optional[str] = None + self.internal_triage_link: Optional[str] = None + self.triage_link_omission_reason: Optional[str] = None + self.local_diff_given_image: Optional[str] = None + 
self.local_diff_closest_image: Optional[str] = None + self.local_diff_diff_image: Optional[str] = None def __init__(self, - working_dir, - gold_properties, - keys_file, - corpus, - instance, - bucket=None): + working_dir: str, + gold_properties: skia_gold_properties.SkiaGoldProperties, + keys_file: str, + corpus: str, + instance: str, + bucket: Optional[str] = None): """Abstract class to handle all aspects of image comparison via Skia Gold. A single SkiaGoldSession is valid for a single instance/corpus/keys_file @@ -73,9 +84,12 @@ def __init__(self, self._corpus = corpus self._instance = instance self._bucket = bucket - self._triage_link_file = tempfile.NamedTemporaryFile(suffix='.txt', - dir=working_dir, - delete=False).name + self._local_png_directory = (self._gold_properties.local_png_directory + or tempfile.mkdtemp()) + with tempfile.NamedTemporaryFile(suffix='.txt', + dir=working_dir, + delete=False) as triage_link_file: + self._triage_link_file = triage_link_file.name # A map of image name (string) to ComparisonResults for that image. self._comparison_results = {} self._authenticated = False @@ -87,12 +101,14 @@ def __init__(self, shutil.copy(keys_file, self._keys_file) def RunComparison(self, - name, - png_file, - output_manager, - inexact_matching_args=None, - use_luci=True, - optional_keys=None): + name: str, + png_file: str, + output_manager: Any, + inexact_matching_args: Optional[List[str]] = None, + use_luci: bool = True, + service_account: Optional[str] = None, + optional_keys: Optional[Dict[str, str]] = None, + force_dryrun: bool = False) -> StepRetVal: """Helper method to run all steps to compare a produced image. Handles authentication, initialization, comparison, and, if necessary, @@ -111,17 +127,22 @@ def RunComparison(self, use_luci: If true, authentication will use the service account provided by the LUCI context. If false, will attempt to use whatever is set up in gsutil, which is only supported for local runs. + service_account: If set, uses the provided service account instead of + LUCI_CONTEXT or whatever is set in gsutil. optional_keys: A dict containing optional key/value pairs to pass to Gold for this comparison. Optional keys are keys unrelated to the configuration the image was produced on, e.g. a comment or whether Gold should treat the image as ignored. + force_dryrun: A boolean denoting whether dryrun should be forced on + regardless of whether this is a local comparison or not. Returns: A tuple (status, error). |status| is a value from SkiaGoldSession.StatusCodes signifying the result of the comparison. |error| is an error message describing the status if not successful. """ - auth_rc, auth_stdout = self.Authenticate(use_luci=use_luci) + auth_rc, auth_stdout = self.Authenticate(use_luci=use_luci, + service_account=service_account) if auth_rc: return self.StatusCodes.AUTH_FAILURE, auth_stdout @@ -133,7 +154,8 @@ def RunComparison(self, name=name, png_file=png_file, inexact_matching_args=inexact_matching_args, - optional_keys=optional_keys) + optional_keys=optional_keys, + force_dryrun=force_dryrun) if not compare_rc: return self.StatusCodes.SUCCESS, None @@ -152,13 +174,17 @@ def RunComparison(self, return self.StatusCodes.LOCAL_DIFF_FAILURE, diff_stdout return self.StatusCodes.COMPARISON_FAILURE_LOCAL, compare_stdout - def Authenticate(self, use_luci=True): + def Authenticate(self, + use_luci: bool = True, + service_account: Optional[str] = None) -> StepRetVal: """Authenticates with Skia Gold for this session.
Args: use_luci: If true, authentication will use the service account provided by the LUCI context. If false, will attempt to use whatever is set up in gsutil, which is only supported for local runs. + service_account: If set, uses the provided service account instead of + LUCI_CONTEXT or whatever is set in gsutil. Returns: A tuple (return_code, output). |return_code| is the return code of the @@ -171,21 +197,24 @@ def Authenticate(self, use_luci=True): logging.warning('Not actually authenticating with Gold due to ' '--bypass-skia-gold-functionality being present.') return 0, None + assert not (use_luci and service_account) auth_cmd = [GOLDCTL_BINARY, 'auth', '--work-dir', self._working_dir] if use_luci: auth_cmd.append('--luci') + elif service_account: + auth_cmd.extend(['--service-account', service_account]) elif not self._gold_properties.local_pixel_tests: raise RuntimeError( - 'Cannot authenticate to Skia Gold with use_luci=False unless running ' - 'local pixel tests') + 'Cannot authenticate to Skia Gold with use_luci=False without a ' + 'service account unless running local pixel tests') rc, stdout = self._RunCmdForRcAndOutput(auth_cmd) if rc == 0: self._authenticated = True return rc, stdout - def Initialize(self): + def Initialize(self) -> StepRetVal: """Initializes the working directory if necessary. This can technically be skipped if the same information is passed to the @@ -245,10 +274,11 @@ def Initialize(self): return rc, stdout def Compare(self, - name, - png_file, - inexact_matching_args=None, - optional_keys=None): + name: str, + png_file: str, + inexact_matching_args: Optional[List[str]] = None, + optional_keys: Optional[Dict[str, str]] = None, + force_dryrun: bool = False) -> StepRetVal: """Compares the given image to images known to Gold. Triage links can later be retrieved using GetTriageLinks(). @@ -263,6 +293,8 @@ def Compare(self, for this comparison. Optional keys are keys unrelated to the configuration the image was produced on, e.g. a comment or whether Gold should treat the image as ignored. + force_dryrun: A boolean denoting whether dryrun should be forced on + regardless of whether this is a local comparison or not. Returns: A tuple (return_code, output). |return_code| is the return code of the @@ -285,7 +317,7 @@ def Compare(self, '--work-dir', self._working_dir, ] - if self._gold_properties.local_pixel_tests: + if self._gold_properties.local_pixel_tests or force_dryrun: compare_cmd.append('--dryrun') if inexact_matching_args: logging.info('Using inexact matching arguments for image %s: %s', name, @@ -334,7 +366,7 @@ def Compare(self, 'Failed to read triage link from file') return rc, stdout - def Diff(self, name, png_file, output_manager): + def Diff(self, name: str, png_file: str, output_manager: Any) -> StepRetVal: """Performs a local image diff against the closest known positive in Gold. This is used for running tests on a workstation, where uploading data to @@ -361,7 +393,7 @@ def Diff(self, name, png_file, output_manager): '--bypass-skia-gold-functionality is not supported when running ' 'tests locally.') - output_dir = self._CreateDiffOutputDir() + output_dir = self._CreateDiffOutputDir(name) # TODO(skbug.com/10611): Remove this temporary work dir and instead just use # self._working_dir once `goldctl diff` stops clobbering the auth files in # the provided work directory. 
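As context for the hunks above: the new service_account and force_dryrun parameters thread from RunComparison() down through Authenticate() and Compare(). A minimal usage sketch, assuming a hypothetical concrete subclass (SkiaGoldSession itself leaves _StoreDiffLinks() and _RunCmdForRcAndOutput() unimplemented) and placeholder key, corpus, and instance values:

# Sketch only: ConcreteGoldSession, the keys file, and the corpus/instance
# names are hypothetical; only the RunComparison() signature comes from the
# patch above.
import tempfile

from skia_gold_common import skia_gold_properties

# args: a parsed argparse.Namespace carrying the Skia Gold flags (assumed).
sgp = skia_gold_properties.SkiaGoldProperties(args)
session = ConcreteGoldSession(tempfile.mkdtemp(), sgp, 'keys.json',
                              'example-corpus', 'example-instance')
status, error = session.RunComparison(
    name='example_test',
    png_file='/tmp/example_test.png',
    output_manager=None,  # Only needed when a local diff is produced.
    use_luci=False,
    service_account='gold-uploader@example.iam.gserviceaccount.com',
    force_dryrun=True)  # Exercise the full pipeline without affecting Gold.
if status != session.StatusCodes.SUCCESS:
  print('Gold comparison failed (%d): %s' % (status, error))

Note that Authenticate() asserts if both use_luci=True and a service account are supplied, so callers must pick exactly one mechanism.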
@@ -393,7 +425,7 @@ def Diff(self, name, png_file, output_manager): finally: shutil.rmtree(os.path.realpath(os.path.join(temp_work_dir, '..'))) - def GetTriageLinks(self, name): + def GetTriageLinks(self, name: str) -> Tuple[str, str]: """Gets the triage links for the given image. Args: @@ -411,7 +443,7 @@ def GetTriageLinks(self, name): return (comparison_results.public_triage_link, comparison_results.internal_triage_link) - def GetTriageLinkOmissionReason(self, name): + def GetTriageLinkOmissionReason(self, name: str) -> str: """Gets the reason why a triage link is not available for an image. Args: @@ -434,7 +466,7 @@ def GetTriageLinkOmissionReason(self, name): 'Somehow have a ComparisonResults instance for %s that should not ' 'exist' % name) - def GetGivenImageLink(self, name): + def GetGivenImageLink(self, name: str) -> str: """Gets the link to the given image used for local diffing. Args: @@ -447,7 +479,7 @@ def GetGivenImageLink(self, name): assert name in self._comparison_results return self._comparison_results[name].local_diff_given_image - def GetClosestImageLink(self, name): + def GetClosestImageLink(self, name: str) -> str: """Gets the link to the closest known image used for local diffing. Args: @@ -460,7 +492,7 @@ def GetClosestImageLink(self, name): assert name in self._comparison_results return self._comparison_results[name].local_diff_closest_image - def GetDiffImageLink(self, name): + def GetDiffImageLink(self, name: str) -> str: """Gets the link to the diff between the given and closest images. Args: @@ -473,7 +505,7 @@ def GetDiffImageLink(self, name): assert name in self._comparison_results return self._comparison_results[name].local_diff_diff_image - def _GeneratePublicTriageLink(self, internal_link): + def _GeneratePublicTriageLink(self, internal_link: str) -> str: """Generates a public triage link given an internal one. Args: @@ -487,7 +519,7 @@ def _GeneratePublicTriageLink(self, internal_link): return internal_link.replace('%s-gold' % self._instance, '%s-public-gold' % self._instance) - def _ClearTriageLinkFile(self): + def _ClearTriageLinkFile(self) -> None: """Clears the contents of the triage link file. This should be done before every comparison since goldctl appends to the @@ -496,10 +528,13 @@ def _ClearTriageLinkFile(self): """ open(self._triage_link_file, 'w').close() - def _CreateDiffOutputDir(self): + def _CreateDiffOutputDir(self, _name: str) -> str: + # We don't use self._local_png_directory here since we want it to be + # automatically cleaned up with the working directory. Any subclasses that + # want to keep it around can override this method. return tempfile.mkdtemp(dir=self._working_dir) - def _GetDiffGoldInstance(self): + def _GetDiffGoldInstance(self) -> str: """Gets the Skia Gold instance to use for the Diff step. This can differ based on how a particular instance is set up, mainly @@ -510,7 +545,8 @@ def _GetDiffGoldInstance(self): # instance. return str(self._instance) + '-public' - def _StoreDiffLinks(self, image_name, output_manager, output_dir): + def _StoreDiffLinks(self, image_name: str, output_manager: Any, + output_dir: str) -> None: """Stores the local diff files as links. The ComparisonResults entry for |image_name| should have its *_image fields @@ -527,7 +563,7 @@ def _StoreDiffLinks(self, image_name, output_manager, output_dir): raise NotImplementedError() @staticmethod - def _RunCmdForRcAndOutput(cmd): + def _RunCmdForRcAndOutput(cmd: List[str]) -> Tuple[int, str]: """Runs |cmd| and returns its returncode and output. 
Args: diff --git a/build/skia_gold_common/skia_gold_session_manager.py b/build/skia_gold_common/skia_gold_session_manager.py index d4166e1dc29f..976a72ed5147 100644 --- a/build/skia_gold_common/skia_gold_session_manager.py +++ b/build/skia_gold_common/skia_gold_session_manager.py @@ -1,14 +1,21 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for managing multiple SkiaGoldSessions.""" import json import tempfile +from typing import Optional, Type, Union +from skia_gold_common import skia_gold_properties +from skia_gold_common import skia_gold_session -class SkiaGoldSessionManager(object): - def __init__(self, working_dir, gold_properties): +KeysInputType = Union[dict, str] + + +class SkiaGoldSessionManager(): + def __init__(self, working_dir: str, + gold_properties: skia_gold_properties.SkiaGoldProperties): """Abstract class to manage one or more skia_gold_session.SkiaGoldSessions. A separate session is required for each instance/corpus/keys_file @@ -25,10 +32,10 @@ def __init__(self, working_dir, gold_properties): self._sessions = {} def GetSkiaGoldSession(self, - keys_input, - corpus=None, - instance=None, - bucket=None): + keys_input: KeysInputType, + corpus: Optional[str] = None, + instance: Optional[str] = None, + bucket: Optional[str] = None): """Gets a SkiaGoldSession for the given arguments. Lazily creates one if necessary. @@ -64,7 +71,7 @@ def GetSkiaGoldSession(self, return session @staticmethod - def _GetDefaultInstance(): + def _GetDefaultInstance() -> str: """Gets the default Skia Gold instance. Returns: @@ -73,7 +80,7 @@ def _GetDefaultInstance(): return 'chrome' @staticmethod - def GetSessionClass(): + def GetSessionClass() -> Type[skia_gold_session.SkiaGoldSession]: """Gets the SkiaGoldSession class to use for session creation. Returns: @@ -82,7 +89,7 @@ def GetSessionClass(): raise NotImplementedError -def _GetKeysAsDict(keys_input): +def _GetKeysAsDict(keys_input: KeysInputType) -> dict: """Converts |keys_input| into a dictionary. Args: @@ -99,12 +106,14 @@ def _GetKeysAsDict(keys_input): return json.load(f) -def _GetKeysAsJson(keys_input, session_work_dir): +def _GetKeysAsJson(keys_input: KeysInputType, session_work_dir: str) -> str: """Converts |keys_input| into a JSON file on disk. Args: keys_input: A dictionary or a string pointing to a JSON file. The contents of either should be Skia Gold config data. + session_work_dir: The working directory under which each individual + SkiaGoldSession's working directory will be created. Returns: A string containing a filepath to a JSON file containing |keys_input|'s diff --git a/build/skia_gold_common/skia_gold_session_manager_unittest.py b/build/skia_gold_common/skia_gold_session_manager_unittest.py index 286fdf2b9705..5752ab278a68 100755 --- a/build/skia_gold_common/skia_gold_session_manager_unittest.py +++ b/build/skia_gold_common/skia_gold_session_manager_unittest.py @@ -1,5 +1,5 @@ #!/usr/bin/env vpython3 -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file.
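The new KeysInputType alias makes the dict-or-filepath contract of the keys helpers explicit. A short sketch of that round trip, mirroring what the unittests below exercise (the temp file path is a placeholder):

# Sketch of the dict-or-path contract accepted by the keys helpers.
import json
import tempfile

from skia_gold_common import skia_gold_session_manager

keys = {'platform': 'linux'}
assert skia_gold_session_manager._GetKeysAsDict(keys) == keys  # Dict passes through.

keys_file = tempfile.NamedTemporaryFile(suffix='.json', delete=False).name
with open(keys_file, 'w') as f:
  json.dump(keys, f)
assert skia_gold_session_manager._GetKeysAsDict(keys_file) == keys  # File is loaded.
# An existing JSON file is handed back as-is rather than re-serialized.
assert skia_gold_session_manager._GetKeysAsJson(keys_file, '') == keys_file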
@@ -9,6 +9,7 @@ import os import sys import tempfile +import typing import unittest if sys.version_info[0] == 2: @@ -29,7 +30,7 @@ class SkiaGoldSessionManagerGetSessionTest(fake_filesystem_unittest.TestCase): """Tests the functionality of SkiaGoldSessionManager.GetSkiaGoldSession.""" - def setUp(self): + def setUp(self) -> None: self.setUpPyfakefs() self._working_dir = tempfile.mkdtemp() self._patcher = mock.patch.object( @@ -38,7 +39,7 @@ def setUp(self): self._session_class_mock.return_value = skia_gold_session.SkiaGoldSession self.addCleanup(self._patcher.stop) - def test_ArgsForwardedToSession(self): + def test_ArgsForwardedToSession(self) -> None: args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) sgsm = skia_gold_session_manager.SkiaGoldSessionManager( @@ -51,7 +52,7 @@ def test_ArgsForwardedToSession(self): # manager's working directory. self.assertEqual(os.path.dirname(session._working_dir), self._working_dir) - def test_corpusFromJson(self): + def test_corpusFromJson(self) -> None: args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) sgsm = skia_gold_session_manager.SkiaGoldSessionManager( @@ -62,7 +63,7 @@ def test_corpusFromJson(self): self.assertEqual(session._corpus, 'foobar') self.assertEqual(session._instance, 'instance') - def test_corpusDefaultsToInstance(self): + def test_corpusDefaultsToInstance(self) -> None: args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) sgsm = skia_gold_session_manager.SkiaGoldSessionManager( @@ -74,7 +75,8 @@ def test_corpusDefaultsToInstance(self): @mock.patch.object(skia_gold_session_manager.SkiaGoldSessionManager, '_GetDefaultInstance') - def test_getDefaultInstance(self, default_instance_mock): + def test_getDefaultInstance(self, + default_instance_mock: mock.MagicMock) -> None: default_instance_mock.return_value = 'default' args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) @@ -86,7 +88,7 @@ def test_getDefaultInstance(self, default_instance_mock): self.assertEqual(session._instance, 'default') @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') - def test_matchingSessionReused(self, session_mock): + def test_matchingSessionReused(self, session_mock: mock.MagicMock) -> None: session_mock.return_value = None args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) @@ -100,7 +102,7 @@ def test_matchingSessionReused(self, session_mock): self.assertEqual(session_mock.call_count, 1) @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') - def test_separateSessionsFromKeys(self, session_mock): + def test_separateSessionsFromKeys(self, session_mock: mock.MagicMock) -> None: session_mock.return_value = None args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) @@ -113,7 +115,8 @@ def test_separateSessionsFromKeys(self, session_mock): self.assertEqual(session_mock.call_count, 2) @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') - def test_separateSessionsFromCorpus(self, session_mock): + def test_separateSessionsFromCorpus(self, + session_mock: mock.MagicMock) -> None: session_mock.return_value = None args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) @@ -125,7 +128,8 @@ def test_separateSessionsFromCorpus(self, session_mock): self.assertEqual(session_mock.call_count, 2) @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') - def test_separateSessionsFromInstance(self, session_mock): + def 
test_separateSessionsFromInstance(self, + session_mock: mock.MagicMock) -> None: session_mock.return_value = None args = createSkiaGoldArgs() sgp = skia_gold_properties.SkiaGoldProperties(args) @@ -140,11 +144,11 @@ def test_separateSessionsFromInstance(self, session_mock): class SkiaGoldSessionManagerKeyConversionTest(fake_filesystem_unittest.TestCase ): - def setUp(self): + def setUp(self) -> None: self.setUpPyfakefs() self._working_dir = tempfile.mkdtemp() - def test_getKeysAsDict(self): + def test_getKeysAsDict(self) -> None: keys_dict = {'foo': 'bar'} keys_file_contents = {'bar': 'baz'} keys_file = tempfile.NamedTemporaryFile(delete=False).name @@ -156,16 +160,16 @@ def test_getKeysAsDict(self): self.assertEqual(skia_gold_session_manager._GetKeysAsDict(keys_file), keys_file_contents) with self.assertRaises(AssertionError): - skia_gold_session_manager._GetKeysAsDict(1) + skia_gold_session_manager._GetKeysAsDict(typing.cast(dict, 1)) - def test_getKeysAsJson(self): + def test_getKeysAsJson(self) -> None: keys_dict = {'foo': 'bar'} keys_file_contents = {'bar': 'baz'} keys_file = tempfile.NamedTemporaryFile(delete=False).name with open(keys_file, 'w') as f: json.dump(keys_file_contents, f) - self.assertEqual(skia_gold_session_manager._GetKeysAsJson(keys_file, None), + self.assertEqual(skia_gold_session_manager._GetKeysAsJson(keys_file, ''), keys_file) keys_dict_as_json = skia_gold_session_manager._GetKeysAsJson( keys_dict, self._working_dir) @@ -173,7 +177,7 @@ def test_getKeysAsJson(self): with open(keys_dict_as_json) as f: self.assertEqual(json.load(f), keys_dict) with self.assertRaises(AssertionError): - skia_gold_session_manager._GetKeysAsJson(1, None) + skia_gold_session_manager._GetKeysAsJson(typing.cast(dict, 1), '') if __name__ == '__main__': diff --git a/build/skia_gold_common/skia_gold_session_unittest.py b/build/skia_gold_common/skia_gold_session_unittest.py index f5c97b5d72bb..de104f97ea02 100755 --- a/build/skia_gold_common/skia_gold_session_unittest.py +++ b/build/skia_gold_common/skia_gold_session_unittest.py @@ -1,5 +1,5 @@ #!/usr/bin/env vpython3 -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
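The bulk of the skia_gold_session_unittest.py diff that follows is a mechanical refactor: per-test @mock.patch.object decorator stacks become patchers started once in setUp() and stopped via addCleanup(). In miniature (ExampleTest and SomeClass are illustrative, not from the patch):

import unittest
from unittest import mock


class SomeClass:  # Illustrative stand-in for the class under test.
  def Authenticate(self):
    raise NotImplementedError


class ExampleTest(unittest.TestCase):
  def setUp(self) -> None:
    # One patcher per mocked method; addCleanup() guarantees stop() runs
    # even when a test fails, with no tearDown() override needed.
    self.auth_patcher = mock.patch.object(SomeClass, 'Authenticate')
    self.auth_mock = self.auth_patcher.start()
    self.addCleanup(self.auth_patcher.stop)

  def test_success(self) -> None:
    self.auth_mock.return_value = (0, None)
    self.assertEqual(SomeClass().Authenticate(), (0, None))
    self.assertEqual(self.auth_mock.call_count, 1)

This keeps each test body focused on return values and assertions instead of repeating four decorators and four positional mock parameters per test.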
@@ -9,6 +9,7 @@ import os import sys import tempfile +from typing import Any import unittest if sys.version_info[0] == 2: @@ -25,7 +26,8 @@ createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs -def assertArgWith(test, arg_list, arg, value): +def assertArgWith(test: unittest.TestCase, arg_list: list, arg: Any, + value: Any) -> None: i = arg_list.index(arg) test.assertEqual(arg_list[i + 1], value) @@ -33,329 +35,348 @@ def assertArgWith(test, arg_list, arg, value): class SkiaGoldSessionRunComparisonTest(fake_filesystem_unittest.TestCase): """Tests the functionality of SkiaGoldSession.RunComparison.""" - def setUp(self): + def setUp(self) -> None: self.setUpPyfakefs() self._working_dir = tempfile.mkdtemp() self._json_keys = tempfile.NamedTemporaryFile(delete=False).name with open(self._json_keys, 'w') as f: json.dump({}, f) - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') - def test_comparisonSuccess(self, auth_mock, init_mock, compare_mock, - diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (0, None) - session = skia_gold_session.SkiaGoldSession(self._working_dir, None, - self._json_keys, None, None) - status, _ = session.RunComparison(None, None, None) + self.auth_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + 'Authenticate') + self.init_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + 'Initialize') + self.compare_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + 'Compare') + self.diff_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + 'Diff') + + self.auth_mock = self.auth_patcher.start() + self.init_mock = self.init_patcher.start() + self.compare_mock = self.compare_patcher.start() + self.diff_mock = self.diff_patcher.start() + + self.addCleanup(self.auth_patcher.stop) + self.addCleanup(self.init_patcher.stop) + self.addCleanup(self.compare_patcher.stop) + self.addCleanup(self.diff_patcher.stop) + + def test_comparisonSuccess(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (0, None) + sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs()) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, _ = session.RunComparison('', '', None) self.assertEqual(status, skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS) - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 0) - - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') - def test_authFailure(self, auth_mock, init_mock, compare_mock, diff_mock): - auth_mock.return_value = (1, 'Auth failed') - session = skia_gold_session.SkiaGoldSession(self._working_dir, None, - self._json_keys, None, None) - status, error = session.RunComparison(None, None, None) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + 
self.assertEqual(self.diff_mock.call_count, 0) + + def test_authFailure(self) -> None: + self.auth_mock.return_value = (1, 'Auth failed') + sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs()) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, error = session.RunComparison('', '', None) self.assertEqual(status, skia_gold_session.SkiaGoldSession.StatusCodes.AUTH_FAILURE) self.assertEqual(error, 'Auth failed') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 0) - self.assertEqual(compare_mock.call_count, 0) - self.assertEqual(diff_mock.call_count, 0) - - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') - def test_initFailure(self, auth_mock, init_mock, compare_mock, diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (1, 'Init failed') - session = skia_gold_session.SkiaGoldSession(self._working_dir, None, - self._json_keys, None, None) - status, error = session.RunComparison(None, None, None) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 0) + self.assertEqual(self.compare_mock.call_count, 0) + self.assertEqual(self.diff_mock.call_count, 0) + + def test_initFailure(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (1, 'Init failed') + sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs()) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, error = session.RunComparison('', '', None) self.assertEqual(status, skia_gold_session.SkiaGoldSession.StatusCodes.INIT_FAILURE) self.assertEqual(error, 'Init failed') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 0) - self.assertEqual(diff_mock.call_count, 0) - - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') - def test_compareFailureRemote(self, auth_mock, init_mock, compare_mock, - diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (1, 'Compare failed') + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 0) + self.assertEqual(self.diff_mock.call_count, 0) + + def test_compareFailureRemote(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (1, 'Compare failed') args = createSkiaGoldArgs(local_pixel_tests=False) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - status, error = session.RunComparison(None, None, None) + self._json_keys, '', '') + status, error = session.RunComparison('', '', None) self.assertEqual( status, skia_gold_session.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_REMOTE) self.assertEqual(error, 'Compare failed') - self.assertEqual(auth_mock.call_count, 1) - 
self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 0) - - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') - def test_compareFailureLocal(self, auth_mock, init_mock, compare_mock, - diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (1, 'Compare failed') - diff_mock.return_value = (0, None) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) + + def test_compareFailureLocal(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (1, 'Compare failed') + self.diff_mock.return_value = (0, None) args = createSkiaGoldArgs(local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - status, error = session.RunComparison(None, None, + self._json_keys, '', '') + status, error = session.RunComparison('', '', 'Definitely an output manager') self.assertEqual( status, skia_gold_session.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_LOCAL) self.assertEqual(error, 'Compare failed') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 1) - - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') - def test_compareInexactMatching(self, auth_mock, init_mock, compare_mock, - diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (0, None) - diff_mock.return_value = (0, None) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 1) + + def test_compareInexactMatching(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (0, None) + self.diff_mock.return_value = (0, None) args = createSkiaGoldArgs(local_pixel_tests=False) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - status, _ = session.RunComparison(None, - None, + self._json_keys, '', '') + status, _ = session.RunComparison('', + '', None, inexact_matching_args=['--inexact']) self.assertEqual(status, skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS) - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 0) - compare_mock.assert_called_with(name=None, - png_file=mock.ANY, - inexact_matching_args=['--inexact'], - optional_keys=None) - - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') - 
@mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') - def test_compareOptionalKeys(self, auth_mock, init_mock, compare_mock, - diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (0, None) - diff_mock.return_value = (0, None) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) + self.compare_mock.assert_called_with(name='', + png_file=mock.ANY, + inexact_matching_args=['--inexact'], + optional_keys=None, + force_dryrun=False) + + def test_compareOptionalKeys(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (0, None) + self.diff_mock.return_value = (0, None) args = createSkiaGoldArgs(local_pixel_tests=False) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - status, _ = session.RunComparison(None, - None, + self._json_keys, '', '') + status, _ = session.RunComparison('', + '', None, optional_keys={'foo': 'bar'}) self.assertEqual(status, skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS) - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 0) - compare_mock.assert_called_with(name=None, - png_file=mock.ANY, - inexact_matching_args=None, - optional_keys={'foo': 'bar'}) - - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') - def test_diffFailure(self, auth_mock, init_mock, compare_mock, diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (1, 'Compare failed') - diff_mock.return_value = (1, 'Diff failed') + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) + self.compare_mock.assert_called_with(name='', + png_file=mock.ANY, + inexact_matching_args=None, + optional_keys={'foo': 'bar'}, + force_dryrun=False) + + def test_compareForceDryrun(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (0, None) + self.diff_mock.return_value = (0, None) + args = createSkiaGoldArgs(local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, _ = session.RunComparison('', '', None, force_dryrun=True) + self.assertEqual(status, + skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) + self.compare_mock.assert_called_with(name='', + png_file=mock.ANY, + inexact_matching_args=None, + optional_keys=None, + force_dryrun=True) 
+ + def test_diffFailure(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (1, 'Compare failed') + self.diff_mock.return_value = (1, 'Diff failed') args = createSkiaGoldArgs(local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - status, error = session.RunComparison(None, None, + self._json_keys, '', '') + status, error = session.RunComparison('', '', 'Definitely an output manager') self.assertEqual( status, skia_gold_session.SkiaGoldSession.StatusCodes.LOCAL_DIFF_FAILURE) self.assertEqual(error, 'Diff failed') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(init_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 1) - - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize') - @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate') - def test_noOutputManagerLocal(self, auth_mock, init_mock, compare_mock, - diff_mock): - auth_mock.return_value = (0, None) - init_mock.return_value = (0, None) - compare_mock.return_value = (1, 'Compare failed') - diff_mock.return_value = (0, None) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 1) + + def test_noOutputManagerLocal(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (1, 'Compare failed') + self.diff_mock.return_value = (0, None) args = createSkiaGoldArgs(local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - status, error = session.RunComparison(None, None, None) + self._json_keys, '', '') + status, error = session.RunComparison('', '', None) self.assertEqual( status, skia_gold_session.SkiaGoldSession.StatusCodes.NO_OUTPUT_MANAGER) self.assertEqual(error, 'No output manager for local diff images') - self.assertEqual(auth_mock.call_count, 1) - self.assertEqual(compare_mock.call_count, 1) - self.assertEqual(diff_mock.call_count, 0) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) class SkiaGoldSessionAuthenticateTest(fake_filesystem_unittest.TestCase): """Tests the functionality of SkiaGoldSession.Authenticate.""" - def setUp(self): + def setUp(self) -> None: self.setUpPyfakefs() self._working_dir = tempfile.mkdtemp() self._json_keys = tempfile.NamedTemporaryFile(delete=False).name - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandOutputReturned(self, cmd_mock): - cmd_mock.return_value = (1, 'Something bad :(') + self.cmd_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + '_RunCmdForRcAndOutput') + self.cmd_mock = self.cmd_patcher.start() + self.addCleanup(self.cmd_patcher.stop) + + def test_commandOutputReturned(self) -> None: + self.cmd_mock.return_value = (1, 'Something bad :(') args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = 
skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') rc, stdout = session.Authenticate() - self.assertEqual(cmd_mock.call_count, 1) + self.assertEqual(self.cmd_mock.call_count, 1) self.assertEqual(rc, 1) self.assertEqual(stdout, 'Something bad :(') - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_bypassSkiaGoldFunctionality(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_bypassSkiaGoldFunctionality(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', bypass_skia_gold_functionality=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') rc, _ = session.Authenticate() self.assertEqual(rc, 0) - cmd_mock.assert_not_called() + self.cmd_mock.assert_not_called() - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_shortCircuitAlreadyAuthenticated(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_shortCircuitAlreadyAuthenticated(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') session._authenticated = True rc, _ = session.Authenticate() self.assertEqual(rc, 0) - cmd_mock.assert_not_called() + self.cmd_mock.assert_not_called() - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_successSetsShortCircuit(self, cmd_mock): - cmd_mock.return_value = (0, None) + def test_successSetsShortCircuit(self) -> None: + self.cmd_mock.return_value = (0, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') self.assertFalse(session._authenticated) rc, _ = session.Authenticate() self.assertEqual(rc, 0) self.assertTrue(session._authenticated) - cmd_mock.assert_called_once() + self.cmd_mock.assert_called_once() - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_failureDoesNotSetShortCircuit(self, cmd_mock): - cmd_mock.return_value = (1, None) + def test_failureDoesNotSetShortCircuit(self) -> None: + self.cmd_mock.return_value = (1, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') self.assertFalse(session._authenticated) rc, _ = session.Authenticate() self.assertEqual(rc, 1) self.assertFalse(session._authenticated) - cmd_mock.assert_called_once() + self.cmd_mock.assert_called_once() - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandWithUseLuciTrue(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandWithUseLuciTrue(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') session.Authenticate(use_luci=True) - 
self.assertIn('--luci', cmd_mock.call_args[0][0]) + self.assertIn('--luci', self.cmd_mock.call_args[0][0]) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandWithUseLuciFalse(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandWithUseLuciFalse(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') session.Authenticate(use_luci=False) - self.assertNotIn('--luci', cmd_mock.call_args[0][0]) + self.assertNotIn('--luci', self.cmd_mock.call_args[0][0]) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandWithUseLuciFalseNotLocal(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandWithUseLuciFalseNotLocal(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') with self.assertRaises(RuntimeError): session.Authenticate(use_luci=False) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandCommonArgs(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandWithUseLuciAndServiceAccount(self) -> None: + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + with self.assertRaises(AssertionError): + session.Authenticate(use_luci=True, service_account='a') + + def test_commandWithServiceAccount(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Authenticate(use_luci=False, service_account='service_account') + call_args = self.cmd_mock.call_args[0][0] + self.assertNotIn('--luci', call_args) + assertArgWith(self, call_args, '--service-account', 'service_account') + + def test_commandCommonArgs(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') session.Authenticate() - call_args = cmd_mock.call_args[0][0] + call_args = self.cmd_mock.call_args[0][0] self.assertIn('auth', call_args) assertArgWith(self, call_args, '--work-dir', self._working_dir) @@ -363,64 +384,64 @@ def test_commandCommonArgs(self, cmd_mock): class SkiaGoldSessionInitializeTest(fake_filesystem_unittest.TestCase): """Tests the functionality of SkiaGoldSession.Initialize.""" - def setUp(self): + def setUp(self) -> None: self.setUpPyfakefs() self._working_dir = tempfile.mkdtemp() self._json_keys = tempfile.NamedTemporaryFile(delete=False).name - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_bypassSkiaGoldFunctionality(self, cmd_mock): - cmd_mock.return_value = (None, None) + 
self.cmd_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + '_RunCmdForRcAndOutput') + self.cmd_mock = self.cmd_patcher.start() + self.addCleanup(self.cmd_patcher.stop) + + def test_bypassSkiaGoldFunctionality(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', bypass_skia_gold_functionality=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') rc, _ = session.Initialize() self.assertEqual(rc, 0) - cmd_mock.assert_not_called() + self.cmd_mock.assert_not_called() - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_shortCircuitAlreadyInitialized(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_shortCircuitAlreadyInitialized(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') session._initialized = True rc, _ = session.Initialize() self.assertEqual(rc, 0) - cmd_mock.assert_not_called() + self.cmd_mock.assert_not_called() - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_successSetsShortCircuit(self, cmd_mock): - cmd_mock.return_value = (0, None) + def test_successSetsShortCircuit(self) -> None: + self.cmd_mock.return_value = (0, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') self.assertFalse(session._initialized) rc, _ = session.Initialize() self.assertEqual(rc, 0) self.assertTrue(session._initialized) - cmd_mock.assert_called_once() + self.cmd_mock.assert_called_once() - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_failureDoesNotSetShortCircuit(self, cmd_mock): - cmd_mock.return_value = (1, None) + def test_failureDoesNotSetShortCircuit(self) -> None: + self.cmd_mock.return_value = (1, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') self.assertFalse(session._initialized) rc, _ = session.Initialize() self.assertEqual(rc, 1) self.assertFalse(session._initialized) - cmd_mock.assert_called_once() + self.cmd_mock.assert_called_once() - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandCommonArgs(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandCommonArgs(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, @@ -430,7 +451,7 @@ def test_commandCommonArgs(self, cmd_mock): instance='instance', bucket='bucket') session.Initialize() - call_args = cmd_mock.call_args[0][0] + call_args = self.cmd_mock.call_args[0][0] self.assertIn('imgtest', call_args) self.assertIn('init', call_args) self.assertIn('--passfail', call_args) @@ -444,27 +465,25 @@ def test_commandCommonArgs(self, cmd_mock): assertArgWith(self, call_args, '--failure-file', 
session._triage_link_file) assertArgWith(self, call_args, '--commit', 'a') - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandTryjobArgs(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandTryjobArgs(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', gerrit_issue=1, gerrit_patchset=2, buildbucket_id=3) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') session.Initialize() - call_args = cmd_mock.call_args[0][0] + call_args = self.cmd_mock.call_args[0][0] assertArgWith(self, call_args, '--issue', '1') assertArgWith(self, call_args, '--patchset', '2') assertArgWith(self, call_args, '--jobid', '3') assertArgWith(self, call_args, '--crs', 'gerrit') assertArgWith(self, call_args, '--cis', 'buildbucket') - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandTryjobArgsNonDefaultCrs(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandTryjobArgsNonDefaultCrs(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(code_review_system='foo', git_revision='a', gerrit_issue=1, @@ -472,24 +491,23 @@ def test_commandTryjobArgsNonDefaultCrs(self, cmd_mock): buildbucket_id=3) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') session.Initialize() - call_args = cmd_mock.call_args[0][0] + call_args = self.cmd_mock.call_args[0][0] assertArgWith(self, call_args, '--issue', '1') assertArgWith(self, call_args, '--patchset', '2') assertArgWith(self, call_args, '--jobid', '3') assertArgWith(self, call_args, '--crs', 'foo') assertArgWith(self, call_args, '--cis', 'buildbucket') - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandTryjobArgsMissing(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandTryjobArgsMissing(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') session.Initialize() - call_args = cmd_mock.call_args[0][0] + call_args = self.cmd_mock.call_args[0][0] self.assertNotIn('--issue', call_args) self.assertNotIn('--patchset', call_args) self.assertNotIn('--jobid', call_args) @@ -500,69 +518,77 @@ def test_commandTryjobArgsMissing(self, cmd_mock): class SkiaGoldSessionCompareTest(fake_filesystem_unittest.TestCase): """Tests the functionality of SkiaGoldSession.Compare.""" - def setUp(self): + def setUp(self) -> None: self.setUpPyfakefs() self._working_dir = tempfile.mkdtemp() self._json_keys = tempfile.NamedTemporaryFile(delete=False).name - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandOutputReturned(self, cmd_mock): - cmd_mock.return_value = (1, 'Something bad :(') + self.cmd_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + '_RunCmdForRcAndOutput') + self.cmd_mock = self.cmd_patcher.start() + self.addCleanup(self.cmd_patcher.stop) + + def test_commandOutputReturned(self) -> None: + self.cmd_mock.return_value = (1, 'Something bad :(') args = createSkiaGoldArgs(git_revision='a') sgp = 
skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - rc, stdout = session.Compare(None, None) - self.assertEqual(cmd_mock.call_count, 1) + self._json_keys, '', '') + rc, stdout = session.Compare('', '') + self.assertEqual(self.cmd_mock.call_count, 1) self.assertEqual(rc, 1) self.assertEqual(stdout, 'Something bad :(') - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_bypassSkiaGoldFunctionality(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_bypassSkiaGoldFunctionality(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', bypass_skia_gold_functionality=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - rc, _ = session.Compare(None, None) + self._json_keys, '', '') + rc, _ = session.Compare('', '') self.assertEqual(rc, 0) - cmd_mock.assert_not_called() + self.cmd_mock.assert_not_called() - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandWithLocalPixelTestsTrue(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandWithLocalPixelTestsTrue(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - session.Compare(None, None) - self.assertIn('--dryrun', cmd_mock.call_args[0][0]) + self._json_keys, '', '') + session.Compare('', '') + self.assertIn('--dryrun', self.cmd_mock.call_args[0][0]) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandWithLocalPixelTestsFalse(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandWithForceDryrunTrue(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Compare('', '', force_dryrun=True) + self.assertIn('--dryrun', self.cmd_mock.call_args[0][0]) + + def test_commandWithLocalPixelTestsFalse(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - session.Compare(None, None) - self.assertNotIn('--dryrun', cmd_mock.call_args[0][0]) + self._json_keys, '', '') + session.Compare('', '') + self.assertNotIn('--dryrun', self.cmd_mock.call_args[0][0]) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandWithInexactArgs(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandWithInexactArgs(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - session.Compare(None, None, inexact_matching_args=['--inexact', 'foobar']) - self.assertIn('--inexact', cmd_mock.call_args[0][0]) - self.assertIn('foobar', cmd_mock.call_args[0][0]) + 
self._json_keys, '', '') + session.Compare('', '', inexact_matching_args=['--inexact', 'foobar']) + self.assertIn('--inexact', self.cmd_mock.call_args[0][0]) + self.assertIn('foobar', self.cmd_mock.call_args[0][0]) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandCommonArgs(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_commandCommonArgs(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, @@ -571,20 +597,19 @@ def test_commandCommonArgs(self, cmd_mock): 'corpus', instance='instance') session.Compare('name', 'png_file') - call_args = cmd_mock.call_args[0][0] + call_args = self.cmd_mock.call_args[0][0] self.assertIn('imgtest', call_args) self.assertIn('add', call_args) assertArgWith(self, call_args, '--test-name', 'name') assertArgWith(self, call_args, '--png-file', 'png_file') assertArgWith(self, call_args, '--work-dir', self._working_dir) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_noLinkOnSuccess(self, cmd_mock): - cmd_mock.return_value = (0, None) + def test_noLinkOnSuccess(self) -> None: + self.cmd_mock.return_value = (0, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') rc, _ = session.Compare('name', 'png_file') self.assertEqual(rc, 0) comparison_result = session._comparison_results['name'] @@ -592,9 +617,8 @@ def test_noLinkOnSuccess(self, cmd_mock): self.assertEqual(comparison_result.internal_triage_link, None) self.assertNotEqual(comparison_result.triage_link_omission_reason, None) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_clLinkOnTrybot(self, cmd_mock): - cmd_mock.return_value = (1, None) + def test_clLinkOnTrybot(self) -> None: + self.cmd_mock.return_value = (1, None) args = createSkiaGoldArgs(git_revision='a', gerrit_issue=1, gerrit_patchset=2, @@ -603,7 +627,7 @@ def test_clLinkOnTrybot(self, cmd_mock): session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, self._json_keys, - None, + '', instance='instance') rc, _ = session.Compare('name', 'png_file') self.assertEqual(rc, 1) @@ -618,14 +642,13 @@ def test_clLinkOnTrybot(self, cmd_mock): self.assertEqual(session.GetTriageLinks('name'), (public_link, internal_link)) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_individualLinkOnCi(self, cmd_mock): + def test_individualLinkOnCi(self) -> None: args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, self._json_keys, - None, + '', instance='foobar') internal_link = 'foobar-gold.skia.org' @@ -636,7 +659,7 @@ def WriteTriageLinkFile(_): f.write(internal_link) return (1, None) - cmd_mock.side_effect = WriteTriageLinkFile + self.cmd_mock.side_effect = WriteTriageLinkFile rc, _ = session.Compare('name', 'png_file') self.assertEqual(rc, 1) comparison_result = session._comparison_results['name'] @@ -648,19 +671,18 @@ def WriteTriageLinkFile(_): self.assertEqual(session.GetTriageLinks('name'), (public_link, internal_link)) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def 
test_validOmissionOnMissingLink(self, cmd_mock): + def test_validOmissionOnMissingLink(self) -> None: args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') def WriteTriageLinkFile(_): with open(session._triage_link_file, 'w'): pass return (1, None) - cmd_mock.side_effect = WriteTriageLinkFile + self.cmd_mock.side_effect = WriteTriageLinkFile rc, _ = session.Compare('name', 'png_file') self.assertEqual(rc, 1) comparison_result = session._comparison_results['name'] @@ -669,19 +691,18 @@ def WriteTriageLinkFile(_): self.assertIn('Gold did not provide a triage link', comparison_result.triage_link_omission_reason) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_validOmissionOnIoError(self, cmd_mock): - cmd_mock.return_value = (1, None) + def test_validOmissionOnIoError(self) -> None: + self.cmd_mock.return_value = (1, None) args = createSkiaGoldArgs(git_revision='a') sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') def DeleteTriageLinkFile(_): os.remove(session._triage_link_file) return (1, None) - cmd_mock.side_effect = DeleteTriageLinkFile + self.cmd_mock.side_effect = DeleteTriageLinkFile rc, _ = session.Compare('name', 'png_file') self.assertEqual(rc, 1) comparison_result = session._comparison_results['name'] @@ -691,86 +712,89 @@ def DeleteTriageLinkFile(_): self.assertIn('Failed to read', comparison_result.triage_link_omission_reason) - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_optionalKeysPassedToGoldctl(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_optionalKeysPassedToGoldctl(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) - session.Compare(None, None, optional_keys={'foo': 'bar'}) - assertArgWith(self, cmd_mock.call_args[0][0], '--add-test-optional-key', - 'foo:bar') + self._json_keys, '', '') + session.Compare('', '', optional_keys={'foo': 'bar'}) + assertArgWith(self, self.cmd_mock.call_args[0][0], + '--add-test-optional-key', 'foo:bar') class SkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase): """Tests the functionality of SkiaGoldSession.Diff.""" - def setUp(self): + def setUp(self) -> None: self.setUpPyfakefs() self._working_dir = tempfile.mkdtemp() self._json_keys = tempfile.NamedTemporaryFile(delete=False).name + self.cmd_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + '_RunCmdForRcAndOutput') + self.cmd_mock = self.cmd_patcher.start() + self.addCleanup(self.cmd_patcher.stop) + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_StoreDiffLinks') - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_commandOutputReturned(self, cmd_mock, _): - cmd_mock.return_value = (1, 'Something bad :(') + def test_commandOutputReturned(self, _) -> None: + self.cmd_mock.return_value = (1, 'Something bad :(') args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - 
self._json_keys, None, None) - rc, stdout = session.Diff(None, None, None) - self.assertEqual(cmd_mock.call_count, 1) + self._json_keys, '', '') + rc, stdout = session.Diff('', '', None) + self.assertEqual(self.cmd_mock.call_count, 1) self.assertEqual(rc, 1) self.assertEqual(stdout, 'Something bad :(') - @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput') - def test_bypassSkiaGoldFunctionality(self, cmd_mock): - cmd_mock.return_value = (None, None) + def test_bypassSkiaGoldFunctionality(self) -> None: + self.cmd_mock.return_value = (None, None) args = createSkiaGoldArgs(git_revision='a', bypass_skia_gold_functionality=True) sgp = skia_gold_properties.SkiaGoldProperties(args) session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, - self._json_keys, None, None) + self._json_keys, '', '') with self.assertRaises(RuntimeError): - session.Diff(None, None, None) + session.Diff('', '', None) class SkiaGoldSessionTriageLinkOmissionTest(fake_filesystem_unittest.TestCase): """Tests the functionality of SkiaGoldSession.GetTriageLinkOmissionReason.""" - def setUp(self): + def setUp(self) -> None: self.setUpPyfakefs() self._working_dir = tempfile.mkdtemp() - def _CreateSession(self): + def _CreateSession(self) -> skia_gold_session.SkiaGoldSession: + sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs()) json_keys = tempfile.NamedTemporaryFile(delete=False).name - session = skia_gold_session.SkiaGoldSession(self._working_dir, None, - json_keys, None, None) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + json_keys, '', '') session._comparison_results = { 'foo': skia_gold_session.SkiaGoldSession.ComparisonResults(), } return session - def test_noComparison(self): + def test_noComparison(self) -> None: session = self._CreateSession() session._comparison_results = {} reason = session.GetTriageLinkOmissionReason('foo') self.assertEqual(reason, 'No image comparison performed for foo') - def test_validReason(self): + def test_validReason(self) -> None: session = self._CreateSession() session._comparison_results['foo'].triage_link_omission_reason = 'bar' reason = session.GetTriageLinkOmissionReason('foo') self.assertEqual(reason, 'bar') - def test_onlyLocal(self): + def test_onlyLocal(self) -> None: session = self._CreateSession() session._comparison_results['foo'].local_diff_given_image = 'bar' reason = session.GetTriageLinkOmissionReason('foo') self.assertEqual(reason, 'Gold only used to do a local image diff') - def test_onlyWithoutTriageLink(self): + def test_onlyWithoutTriageLink(self) -> None: session = self._CreateSession() comparison_result = session._comparison_results['foo'] comparison_result.public_triage_link = 'bar' @@ -781,7 +805,7 @@ def test_onlyWithoutTriageLink(self): with self.assertRaises(AssertionError): session.GetTriageLinkOmissionReason('foo') - def test_resultsShouldNotExist(self): + def test_resultsShouldNotExist(self) -> None: session = self._CreateSession() with self.assertRaises(RuntimeError): session.GetTriageLinkOmissionReason('foo') diff --git a/build/skia_gold_common/unittest_utils.py b/build/skia_gold_common/unittest_utils.py index cd46ce8b43ce..4fe23a9bf84b 100644 --- a/build/skia_gold_common/unittest_utils.py +++ b/build/skia_gold_common/unittest_utils.py @@ -1,13 +1,17 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
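The recurring refactor in the tests above (replacing a per-test @mock.patch.object decorator with a single patcher started in setUp() and stopped via addCleanup()) is the standard unittest.mock patcher pattern. A minimal, self-contained sketch of that pattern, with hypothetical class and test names that are not part of this patch:

    import unittest
    from unittest import mock

    class FakeSession:
        # Stand-in for SkiaGoldSession; only the mocked method matters here.
        def _RunCmdForRcAndOutput(self, cmd):
            raise NotImplementedError

        def Compare(self, name, png_file):
            return self._RunCmdForRcAndOutput(['goldctl', 'imgtest', 'add'])

    class FakeSessionTest(unittest.TestCase):
        def setUp(self) -> None:
            # One patcher for the class replaces N per-test decorators.
            self.cmd_patcher = mock.patch.object(FakeSession,
                                                 '_RunCmdForRcAndOutput')
            self.cmd_mock = self.cmd_patcher.start()
            # addCleanup() guarantees stop() runs even when a test fails,
            # keeping the patch from leaking into other test classes.
            self.addCleanup(self.cmd_patcher.stop)

        def test_compareReturnsCmdResult(self) -> None:
            self.cmd_mock.return_value = (1, 'Something bad :(')
            rc, stdout = FakeSession().Compare('name', 'png_file')
            self.assertEqual((rc, stdout), (1, 'Something bad :('))
            self.assertEqual(self.cmd_mock.call_count, 1)

    if __name__ == '__main__':
        unittest.main()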
"""Utility methods for Skia Gold functionality unittests.""" +import argparse import collections +import typing +from typing import Optional _SkiaGoldArgs = collections.namedtuple('_SkiaGoldArgs', [ 'local_pixel_tests', 'no_luci_auth', + 'service_account', 'code_review_system', 'continuous_integration_system', 'git_revision', @@ -15,19 +19,26 @@ 'gerrit_patchset', 'buildbucket_id', 'bypass_skia_gold_functionality', + 'skia_gold_local_png_write_directory', ]) -def createSkiaGoldArgs(local_pixel_tests=None, - no_luci_auth=None, - code_review_system=None, - continuous_integration_system=None, - git_revision=None, - gerrit_issue=None, - gerrit_patchset=None, - buildbucket_id=None, - bypass_skia_gold_functionality=None): - return _SkiaGoldArgs(local_pixel_tests, no_luci_auth, code_review_system, - continuous_integration_system, git_revision, - gerrit_issue, gerrit_patchset, buildbucket_id, - bypass_skia_gold_functionality) +def createSkiaGoldArgs(local_pixel_tests: Optional[bool] = None, + no_luci_auth: Optional[bool] = None, + service_account: Optional[str] = None, + code_review_system: Optional[str] = None, + continuous_integration_system: Optional[str] = None, + git_revision: Optional[str] = None, + gerrit_issue: Optional[int] = None, + gerrit_patchset: Optional[int] = None, + buildbucket_id: Optional[int] = None, + bypass_skia_gold_functionality: Optional[bool] = None, + skia_gold_local_png_write_directory: Optional[str] = None + ) -> argparse.Namespace: + return typing.cast( + argparse.Namespace, + _SkiaGoldArgs(local_pixel_tests, no_luci_auth, service_account, + code_review_system, continuous_integration_system, + git_revision, gerrit_issue, gerrit_patchset, buildbucket_id, + bypass_skia_gold_functionality, + skia_gold_local_png_write_directory)) diff --git a/build/symlink.gni b/build/symlink.gni index dcaa5e5045d0..e71128643410 100644 --- a/build/symlink.gni +++ b/build/symlink.gni @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -22,6 +22,9 @@ template("symlink") { rebase_path(invoker.source, get_path_info(invoker.output, "dir")), rebase_path(invoker.output, root_build_dir), ] + if (defined(invoker.touch) && invoker.touch) { + args += [ "--touch=" + rebase_path(invoker.source, root_build_dir) ] + } } } diff --git a/build/symlink.py b/build/symlink.py index 0f90696a6569..ad938072d59e 100755 --- a/build/symlink.py +++ b/build/symlink.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -83,6 +83,7 @@ def Main(argv): if options.touch: + os.makedirs(os.path.dirname(options.touch), exist_ok=True) with open(options.touch, 'w'): pass diff --git a/build/timestamp.gni b/build/timestamp.gni index 4d805c094253..b9b57d9dd0cf 100644 --- a/build/timestamp.gni +++ b/build/timestamp.gni @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
# diff --git a/build/toolchain/BUILD.gn b/build/toolchain/BUILD.gn index 00f3e1115be1..73e3a71cc21b 100644 --- a/build/toolchain/BUILD.gn +++ b/build/toolchain/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -11,8 +11,8 @@ declare_args() { action_pool_depth = -1 } -if (is_starboardized_toolchain) { - if (action_pool_depth == -1 || (use_goma || use_rbe)) { +if (is_starboardized_toolchain || current_toolchain == default_toolchain) { + if (action_pool_depth == -1 || (use_goma || use_remoteexec)) { action_pool_depth = exec_script("get_cpu_count.py", [], "value") } @@ -23,4 +23,8 @@ if (is_starboardized_toolchain) { pool("action_pool") { depth = action_pool_depth } + + pool("remote_action_pool") { + depth = 1000 + } } diff --git a/build/toolchain/OWNERS b/build/toolchain/OWNERS index d7012d39ad9c..90229ac68e78 100644 --- a/build/toolchain/OWNERS +++ b/build/toolchain/OWNERS @@ -1,5 +1,2 @@ -dpranke@google.com -scottmg@chromium.org - # Code Coverage. -per-file *code_coverage*=liaoyuke@chromium.org +per-file *code_coverage*=pasthana@google.com diff --git a/build/toolchain/aix/BUILD.gn b/build/toolchain/aix/BUILD.gn index 523b54ef61a9..71e4de29befc 100644 --- a/build/toolchain/aix/BUILD.gn +++ b/build/toolchain/aix/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -18,7 +18,7 @@ gcc_toolchain("ppc64") { current_os = "aix" # reclient does not support gcc. - use_rbe = false + use_remoteexec = false is_clang = false } } diff --git a/build/toolchain/android/BUILD.gn b/build/toolchain/android/BUILD.gn index 3299dceb99db..20257d9ace19 100644 --- a/build/toolchain/android/BUILD.gn +++ b/build/toolchain/android/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -15,14 +15,8 @@ declare_args() { android_unstripped_runtime_outputs = true } -# The Android clang toolchains share most of the same parameters, so we have this -# wrapper around gcc_toolchain to avoid duplication of logic. -# -# Parameters: -# - binary_prefix -# Prefix of compiler executables. template("android_clang_toolchain") { - gcc_toolchain(target_name) { + clang_toolchain(target_name) { assert(defined(invoker.toolchain_args), "toolchain_args must be defined for android_clang_toolchain()") @@ -43,33 +37,40 @@ template("android_clang_toolchain") { # Output linker map files for binary size analysis. enable_linker_map = true - _android_tool_prefix = - "$android_toolchain_root/bin/${invoker.binary_prefix}-" - - # The tools should be run relative to the build dir. - _tool_prefix = rebase_path("$_android_tool_prefix", root_build_dir) - - _prefix = rebase_path("$clang_base_path/bin", root_build_dir) - cc = "$_prefix/clang" - cxx = "$_prefix/clang++" - ar = "$_prefix/llvm-ar" - ld = cxx - readelf = _tool_prefix + "readelf" - nm = "$_prefix/llvm-nm" - strip = "$_prefix/llvm-strip" + strip = rebase_path("$clang_base_path/bin/llvm-strip", root_build_dir) if (_use_debug_fission) { - dwp = _tool_prefix + "dwp" + # llvm-dwp does not work with thin lto, so use binutils one. 
+ # https://crbug.com/1264130 + if (toolchain_args.current_cpu == "arm") { + _dwp = "arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/arm-linux-androideabi-dwp" + } else if (toolchain_args.current_cpu == "arm64") { + _dwp = "aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/aarch64-linux-android-dwp" + } else if (toolchain_args.current_cpu == "x86") { + _dwp = "x86-4.9/prebuilt/linux-x86_64/bin/i686-linux-android-dwp" + } else if (toolchain_args.current_cpu == "x64") { + _dwp = "x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-dwp" + } else { + _dwp = "llvm/prebuilt/linux-x86_64/bin/llvm-dwp" + } + + dwp = rebase_path("$android_ndk_root/toolchains/$_dwp", root_build_dir) } + use_unstripped_as_runtime_outputs = android_unstripped_runtime_outputs # Don't use .cr.so for loadable_modules since they are always loaded via # absolute path. loadable_module_extension = ".so" + + # We propagate configs to allow cross-toolchain JNI include directories to + # work. This flag does not otherwise affect our build, but if applied to + # non-android toolchains, it causes unwanted configs from perfetto to + # propagate from host_toolchain deps. + propagates_configs = true } } android_clang_toolchain("android_clang_x86") { - binary_prefix = "i686-linux-android" toolchain_args = { current_cpu = "x86" @@ -79,12 +80,14 @@ android_clang_toolchain("android_clang_x86") { # TODO(crbug.com/865376) use_clang_coverage = false - # This turns off all of the LaCrOS-specific flags. A LaCrOS build may use - # |ash_clang_x64| toolchain, which is a chromeos toolchain, to build - # Ash-Chrome in a subdirectory, and because chromeos toolchain uses android - # toolchain, which eventually resulted in that android toolchains being used - # inside a LaCrOS build. + # This turns off all of the LaCrOS-specific flags. A LaCrOS-related build + # may use the |ash_clang_x64| or |lacros_clang_x64| toolchain, which are + # chromeos toolchains, to build Ash-Chrome or Lacros-Chrome in a + # subdirectory; because the chromeos toolchain uses the android toolchain, + # this eventually resulted in android toolchains being used inside a LaCrOS + # build. also_build_ash_chrome = false + also_build_lacros_chrome = false chromeos_is_browser_only = false ozone_platform = "" ozone_platform_wayland = false @@ -92,21 +95,18 @@ } android_clang_toolchain("android_clang_arm") { - binary_prefix = "arm-linux-androideabi" toolchain_args = { current_cpu = "arm" } } android_clang_toolchain("android_clang_mipsel") { - binary_prefix = "mipsel-linux-android" toolchain_args = { current_cpu = "mipsel" } } android_clang_toolchain("android_clang_x64") { - binary_prefix = "x86_64-linux-android" toolchain_args = { current_cpu = "x64" @@ -116,12 +116,14 @@ android_clang_toolchain("android_clang_x64") { # TODO(crbug.com/865376) use_clang_coverage = false - # This turns off all of the LaCrOS-specific flags. A LaCrOS build may use - # |ash_clang_x64| toolchain, which is a chromeos toolchain, to build - # Ash-Chrome in a subdirectory, and because chromeos toolchain uses android - # toolchain, which eventually resulted in that android toolchains being used - # inside a LaCrOS build. + # This turns off all of the LaCrOS-specific flags. A LaCrOS-related build
+ # may use the |ash_clang_x64| or |lacros_clang_x64| toolchain, which are + # chromeos toolchains, to build Ash-Chrome or Lacros-Chrome in a + # subdirectory; because the chromeos toolchain uses the android toolchain, + # this eventually resulted in android toolchains being used inside a LaCrOS + # build. also_build_ash_chrome = false + also_build_lacros_chrome = false chromeos_is_browser_only = false ozone_platform = "" ozone_platform_wayland = false @@ -129,14 +131,12 @@ android_clang_toolchain("android_clang_x64") { } android_clang_toolchain("android_clang_arm64") { - binary_prefix = "aarch64-linux-android" toolchain_args = { current_cpu = "arm64" } } android_clang_toolchain("android_clang_arm64_hwasan") { - binary_prefix = "aarch64-linux-android" toolchain_args = { current_cpu = "arm64" is_hwasan = true @@ -145,8 +145,24 @@ } android_clang_toolchain("android_clang_mips64el") { - binary_prefix = "mips64el-linux-android" toolchain_args = { current_cpu = "mips64el" } } + +# Toolchain for creating native libraries that can be used by +# robolectric_binary targets. It does not emulate NDK APIs, nor does it make +# NDK header files available. +# Targets that opt into defining JNI entrypoints should use the +# //third_party/jdk:jdk config to make jni.h available. +# This toolchain will set: +# is_linux = true +# is_android = false +# is_robolectric = true +clang_toolchain("robolectric_$host_cpu") { + toolchain_args = { + current_os = host_os + current_cpu = host_cpu + is_robolectric = true + } +} diff --git a/build/toolchain/android/DIR_METADATA b/build/toolchain/android/DIR_METADATA new file mode 100644 index 000000000000..cdc2d6fb6eb6 --- /dev/null +++ b/build/toolchain/android/DIR_METADATA @@ -0,0 +1 @@ +mixins: "//build/android/COMMON_METADATA" diff --git a/build/toolchain/apple/.style.yapf b/build/toolchain/apple/.style.yapf new file mode 100644 index 000000000000..557fa7bf84c0 --- /dev/null +++ b/build/toolchain/apple/.style.yapf @@ -0,0 +1,2 @@ +[style] +based_on_style = pep8 diff --git a/build/toolchain/apple/BUILD.gn b/build/toolchain/apple/BUILD.gn index 6f074fd2d868..ce5a7059eb5e 100644 --- a/build/toolchain/apple/BUILD.gn +++ b/build/toolchain/apple/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/toolchain/apple/filter_libtool.py b/build/toolchain/apple/filter_libtool.py index 44c1c4400d24..269093bbbf5e 100644 --- a/build/toolchain/apple/filter_libtool.py +++ b/build/toolchain/apple/filter_libtool.py @@ -1,8 +1,7 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import os import re diff --git a/build/toolchain/apple/get_tool_mtime.py b/build/toolchain/apple/get_tool_mtime.py index ff0254c635e6..4ce19e1cc73a 100644 --- a/build/toolchain/apple/get_tool_mtime.py +++ b/build/toolchain/apple/get_tool_mtime.py @@ -1,8 +1,7 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file.
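As a usage note for the robolectric toolchain added above: per its comment, targets defining JNI entrypoints must pull in jni.h explicitly. A sketch under that assumption (target and file names hypothetical):

    source_set("sample_jni") {
      sources = [ "sample_jni.cc" ]
      if (is_robolectric) {
        # The robolectric toolchain provides no NDK headers, so jni.h
        # comes from the JDK config instead.
        configs += [ "//third_party/jdk:jdk" ]
      }
    }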
-from __future__ import print_function import os import sys diff --git a/build/toolchain/apple/linker_driver.py b/build/toolchain/apple/linker_driver.py index c21e18a0fb06..415a9fd21ec9 100755 --- a/build/toolchain/apple/linker_driver.py +++ b/build/toolchain/apple/linker_driver.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -9,15 +9,18 @@ import shutil import subprocess import sys +import tempfile -# On mac, the values of these globals are modified when parsing -Wcrl, flags. On -# ios, the script uses the defaults. -DSYMUTIL_INVOKE = ['xcrun', 'dsymutil'] -STRIP_INVOKE = ['xcrun', 'strip'] +# The path to `whole_archive`. +sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')) -# Setting this flag will emit a deterministic binary by stripping dates from the -# N_OSO field. -DETERMINISTIC_FLAG = '--deterministic' +import whole_archive + +# Prefix for all custom linker driver arguments. +LINKER_DRIVER_ARG_PREFIX = '-Wcrl,' +# Linker action to create a directory and pass it to the linker as +# `-object_path_lto`. Special-cased since it has to run before the link. +OBJECT_PATH_LTO = 'object_path_lto' # The linker_driver.py is responsible for forwarding a linker invocation to # the compiler driver, while processing special arguments itself. @@ -33,274 +36,333 @@ # removal of the special driver arguments, described below). Then the driver # performs additional actions, based on these arguments: # -# -Wcrl,dsym, -# After invoking the linker, this will run `dsymutil` on the linker's -# output, producing a dSYM bundle, stored at dsym_path_prefix. As an -# example, if the linker driver were invoked with: -# "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..." -# The resulting dSYM would be out/gn/libbar.dylib.dSYM/. +# -Wcrl,installnametoolpath, +# Sets the path to the `install_name_tool` to run with +# -Wcrl,installnametool, in which case `xcrun` is not used to invoke it. +# +# -Wcrl,installnametool, +# After invoking the linker, this will run install_name_tool on the linker's +# output. |arguments| are comma-separated arguments to be passed to the +# install_name_tool command. # -# -Wcrl,dsymutilpath, -# Sets the path to the dsymutil to run with -Wcrl,dsym, in which case -# `xcrun` is not used to invoke it. +# -Wcrl,dsym, +# After invoking the linker, this will run `dsymutil` on the linker's +# output, producing a dSYM bundle, stored at dsym_path_prefix. As an +# example, if the linker driver were invoked with: +# "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..." +# The resulting dSYM would be out/gn/libbar.dylib.dSYM/. # -# -Wcrl,unstripped, -# After invoking the linker, and before strip, this will save a copy of -# the unstripped linker output in the directory unstripped_path_prefix. +# -Wcrl,dsymutilpath, +# Sets the path to the dsymutil to run with -Wcrl,dsym, in which case +# `xcrun` is not used to invoke it. # -# -Wcrl,strip, -# After invoking the linker, and optionally dsymutil, this will run -# the strip command on the linker's output. strip_arguments are -# comma-separated arguments to be passed to the strip command. +# -Wcrl,unstripped, +# After invoking the linker, and before strip, this will save a copy of +# the unstripped linker output in the directory unstripped_path_prefix. 
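The remaining actions (strip, strippath, object_path_lto) are documented below; to make the composition concrete, here is a hypothetical driver invocation using the actions documented so far (output paths invented for illustration):

    linker_driver.py clang++ -o out/libbar.dylib obj/bar.o ... \
        -Wcrl,installnametool,-id,@rpath/libbar.dylib \
        -Wcrl,dsym,out \
        -Wcrl,unstripped,out/unstripped

This would link out/libbar.dylib, rewrite its install name via install_name_tool, produce out/libbar.dylib.dSYM/, and save an unstripped copy at out/unstripped/libbar.dylib.unstripped.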
# -# -Wcrl,strippath, -# Sets the path to the strip to run with -Wcrl,strip, in which case -# `xcrun` is not used to invoke it. - - -def Main(args): - """Main function for the linker driver. Separates out the arguments for - the main compiler driver and the linker driver, then invokes all the - required tools. - - Args: - args: list of string, Arguments to the script. - """ - - if len(args) < 2: - raise RuntimeError("Usage: linker_driver.py [linker-invocation]") - - # Collect arguments to the linker driver (this script) and remove them from - # the arguments being passed to the compiler driver. - linker_driver_actions = {} - compiler_driver_args = [] - deterministic = False - for arg in args[1:]: - if arg.startswith(_LINKER_DRIVER_ARG_PREFIX): - # Convert driver actions into a map of name => lambda to invoke. - driver_action = ProcessLinkerDriverArg(arg) - assert driver_action[0] not in linker_driver_actions - linker_driver_actions[driver_action[0]] = driver_action[1] - elif arg == DETERMINISTIC_FLAG: - deterministic = True - else: - compiler_driver_args.append(arg) - - linker_driver_outputs = [_FindLinkerOutput(compiler_driver_args)] - - try: - # Zero the mtime in OSO fields for deterministic builds. - # https://crbug.com/330262. - env = os.environ.copy() - if deterministic: - env['ZERO_AR_DATE'] = '1' - # Run the linker by invoking the compiler driver. - subprocess.check_call(compiler_driver_args, env=env) - - # Run the linker driver actions, in the order specified by the actions list. - for action in _LINKER_DRIVER_ACTIONS: - name = action[0] - if name in linker_driver_actions: - linker_driver_outputs += linker_driver_actions[name](args) - except: - # If a linker driver action failed, remove all the outputs to make the - # build step atomic. - map(_RemovePath, linker_driver_outputs) - - # Re-report the original failure. - raise - - -def ProcessLinkerDriverArg(arg): - """Processes a linker driver argument and returns a tuple containing the - name and unary lambda to invoke for that linker driver action. - - Args: - arg: string, The linker driver argument. - - Returns: - A 2-tuple: - 0: The driver action name, as in _LINKER_DRIVER_ACTIONS. - 1: An 1-ary lambda that takes the full list of arguments passed to - Main(). The lambda should call the linker driver action that - corresponds to the argument and return a list of outputs from the - action. - """ - if not arg.startswith(_LINKER_DRIVER_ARG_PREFIX): - raise ValueError('%s is not a linker driver argument' % (arg, )) - - sub_arg = arg[len(_LINKER_DRIVER_ARG_PREFIX):] - - for driver_action in _LINKER_DRIVER_ACTIONS: - (name, action) = driver_action - if sub_arg.startswith(name): - return (name, lambda full_args: action(sub_arg[len(name):], full_args)) - - raise ValueError('Unknown linker driver argument: %s' % (arg, )) - - -def RunDsymUtil(dsym_path_prefix, full_args): - """Linker driver action for -Wcrl,dsym,. Invokes dsymutil - on the linker's output and produces a dsym file at |dsym_file| path. - - Args: - dsym_path_prefix: string, The path at which the dsymutil output should be - located. - full_args: list of string, Full argument list for the linker driver. - - Returns: - list of string, Build step outputs. - """ - if not len(dsym_path_prefix): - raise ValueError('Unspecified dSYM output file') - - linker_out = _FindLinkerOutput(full_args) - base = os.path.basename(linker_out) - dsym_out = os.path.join(dsym_path_prefix, base + '.dSYM') - - # Remove old dSYMs before invoking dsymutil. 
- _RemovePath(dsym_out) - - tools_paths = _FindToolsPaths(full_args) - if os.environ.get('PATH'): - tools_paths.append(os.environ['PATH']) - dsymutil_env = os.environ.copy() - dsymutil_env['PATH'] = ':'.join(tools_paths) - subprocess.check_call(DSYMUTIL_INVOKE + ['-o', dsym_out, linker_out], - env=dsymutil_env) - return [dsym_out] - - -def SetDsymutilPath(dsymutil_path, full_args): - """Linker driver action for -Wcrl,dsymutilpath,. - - Sets the invocation command for dsymutil, which allows the caller to specify - an alternate dsymutil. This action is always processed before the RunDsymUtil - action. - - Args: - dsymutil_path: string, The path to the dsymutil binary to run - full_args: list of string, Full argument list for the linker driver. - - Returns: - No output - this step is run purely for its side-effect. - """ - global DSYMUTIL_INVOKE - DSYMUTIL_INVOKE = [dsymutil_path] - return [] - - -def RunSaveUnstripped(unstripped_path_prefix, full_args): - """Linker driver action for -Wcrl,unstripped,. Copies - the linker output to |unstripped_path_prefix| before stripping. - - Args: - unstripped_path_prefix: string, The path at which the unstripped output - should be located. - full_args: list of string, Full argument list for the linker driver. - - Returns: - list of string, Build step outputs. - """ - if not len(unstripped_path_prefix): - raise ValueError('Unspecified unstripped output file') - - linker_out = _FindLinkerOutput(full_args) - base = os.path.basename(linker_out) - unstripped_out = os.path.join(unstripped_path_prefix, base + '.unstripped') - - shutil.copyfile(linker_out, unstripped_out) - return [unstripped_out] - - -def RunStrip(strip_args_string, full_args): - """Linker driver action for -Wcrl,strip,. - - Args: - strip_args_string: string, Comma-separated arguments for `strip`. - full_args: list of string, Full arguments for the linker driver. - - Returns: - list of string, Build step outputs. - """ - strip_command = list(STRIP_INVOKE) - if len(strip_args_string) > 0: - strip_command += strip_args_string.split(',') - strip_command.append(_FindLinkerOutput(full_args)) - subprocess.check_call(strip_command) - return [] - - -def SetStripPath(strip_path, full_args): - """Linker driver action for -Wcrl,strippath,. - - Sets the invocation command for strip, which allows the caller to specify - an alternate strip. This action is always processed before the RunStrip - action. - - Args: - strip_path: string, The path to the strip binary to run - full_args: list of string, Full argument list for the linker driver. - - Returns: - No output - this step is run purely for its side-effect. - """ - global STRIP_INVOKE - STRIP_INVOKE = [strip_path] - return [] - - -def _FindLinkerOutput(full_args): - """Finds the output of the linker by looking for the output flag in its - argument list. As this is a required linker argument, raises an error if it - cannot be found. - """ - # The linker_driver.py script may be used to wrap either the compiler linker - # (uses -o to configure the output) or lipo (uses -output to configure the - # output). Since wrapping the compiler linker is the most likely possibility - # use try/except and fallback to checking for -output if -o is not found. 
- try: - output_flag_index = full_args.index('-o') - except ValueError: - output_flag_index = full_args.index('-output') - return full_args[output_flag_index + 1] - - -def _FindToolsPaths(full_args): - """Finds all paths where the script should look for additional tools.""" - paths = [] - for idx, arg in enumerate(full_args): - if arg in ['-B', '--prefix']: - paths.append(full_args[idx + 1]) - elif arg.startswith('-B'): - paths.append(arg[2:]) - elif arg.startswith('--prefix='): - paths.append(arg[9:]) - return paths - - -def _RemovePath(path): - """Removes the file or directory at |path| if it exists.""" - if os.path.exists(path): - if os.path.isdir(path): - shutil.rmtree(path) - else: - os.unlink(path) - - -_LINKER_DRIVER_ARG_PREFIX = '-Wcrl,' -"""List of linker driver actions. The sort order of this list affects the -order in which the actions are invoked. The first item in the tuple is the -argument's -Wcrl, and the second is the function to invoke. -""" -_LINKER_DRIVER_ACTIONS = [ - ('dsymutilpath,', SetDsymutilPath), - ('dsym,', RunDsymUtil), - ('unstripped,', RunSaveUnstripped), - ('strippath,', SetStripPath), - ('strip,', RunStrip), -] +# -Wcrl,strip, + After invoking the linker, and optionally dsymutil, this will run + the strip command on the linker's output. strip_arguments are + comma-separated arguments to be passed to the strip command. +# +# -Wcrl,strippath, + Sets the path to the strip to run with -Wcrl,strip, in which case + `xcrun` is not used to invoke it. +# -Wcrl,object_path_lto + Creates a temporary directory for LTO object files. + + +class LinkerDriver(object): + def __init__(self, args): + """Creates a new linker driver. + + Args: + args: list of string, Arguments to the script. + """ + if len(args) < 2: + raise RuntimeError("Usage: linker_driver.py [linker-invocation]") + self._args = args + + # List of linker driver actions. **The sort order of this list affects + # the order in which the actions are invoked.** + # The first item in the tuple is the argument's -Wcrl, + # and the second is the function to invoke. + self._actions = [ + ('installnametoolpath,', self.set_install_name_tool_path), + ('installnametool,', self.run_install_name_tool), + ('dsymutilpath,', self.set_dsymutil_path), + ('dsym,', self.run_dsymutil), + ('unstripped,', self.run_save_unstripped), + ('strippath,', self.set_strip_path), + ('strip,', self.run_strip), + ] + + # Linker driver actions can modify these values. + self._install_name_tool_cmd = ['xcrun', 'install_name_tool'] + self._dsymutil_cmd = ['xcrun', 'dsymutil'] + self._strip_cmd = ['xcrun', 'strip'] + + # The linker output file, lazily computed in self._get_linker_output(). + self._linker_output = None + # The temporary directory for intermediate LTO object files. If it + # exists, it will clean itself up on script exit. + self._object_path_lto = None + + def run(self): + """Runs the linker driver, separating out the main compiler driver's + arguments from the ones handled by this class. It then invokes the + required tools, starting with the compiler driver to produce the linker + output. + """ + # Collect arguments to the linker driver (this script) and remove them + # from the arguments being passed to the compiler driver. + linker_driver_actions = {} + compiler_driver_args = [] + for index, arg in enumerate(self._args[1:]): + if arg.startswith(LINKER_DRIVER_ARG_PREFIX): + # Convert driver actions into a map of name => lambda to invoke.
+ driver_action = self._process_driver_arg(arg) + assert driver_action[0] not in linker_driver_actions + linker_driver_actions[driver_action[0]] = driver_action[1] + else: + compiler_driver_args.append(arg) + + if self._object_path_lto is not None: + compiler_driver_args.append('-Wl,-object_path_lto,{}'.format( + self._object_path_lto.name)) + if self._get_linker_output() is None: + raise ValueError( + 'Could not find path to linker output (-o or --output)') + + # We want to link rlibs as --whole-archive if they are part of a unit + # test target. This is determined by switch + # `-LinkWrapper,add-whole-archive`. + compiler_driver_args = whole_archive.wrap_with_whole_archive( + compiler_driver_args) + + linker_driver_outputs = [self._get_linker_output()] + + try: + # Zero the mtime in OSO fields for deterministic builds. + # https://crbug.com/330262. + env = os.environ.copy() + env['ZERO_AR_DATE'] = '1' + # Run the linker by invoking the compiler driver. + subprocess.check_call(compiler_driver_args, env=env) + + # Run the linker driver actions, in the order specified by the + # actions list. + for action in self._actions: + name = action[0] + if name in linker_driver_actions: + linker_driver_outputs += linker_driver_actions[name]() + except: + # If a linker driver action failed, remove all the outputs to make + # the build step atomic. Iterate explicitly: under Python 3 a bare + # map() call is lazy and would remove nothing. + for linker_driver_output in linker_driver_outputs: + _remove_path(linker_driver_output) + + # Re-report the original failure. + raise + + def _get_linker_output(self): + """Returns the value of the output argument to the linker.""" + if not self._linker_output: + for index, arg in enumerate(self._args): + if arg in ('-o', '-output', '--output'): + self._linker_output = self._args[index + 1] + break + return self._linker_output + + def _process_driver_arg(self, arg): + """Processes a linker driver argument and returns a tuple containing the + name and unary lambda to invoke for that linker driver action. + + Args: + arg: string, The linker driver argument. + + Returns: + A 2-tuple: + 0: The driver action name, as in |self._actions|. + 1: A lambda that calls the linker driver action with its direct + argument and returns a list of outputs from the action. + """ + if not arg.startswith(LINKER_DRIVER_ARG_PREFIX): + raise ValueError('%s is not a linker driver argument' % (arg, )) + + sub_arg = arg[len(LINKER_DRIVER_ARG_PREFIX):] + # Special-cased, since it needs to run before the link. + # TODO(lgrey): Remove if/when we start running `dsymutil` + # through the clang driver. See https://crbug.com/1324104 + if sub_arg == OBJECT_PATH_LTO: + self._object_path_lto = tempfile.TemporaryDirectory( + dir=os.getcwd()) + return (OBJECT_PATH_LTO, lambda: []) + + for driver_action in self._actions: + (name, action) = driver_action + if sub_arg.startswith(name): + return (name, lambda: action(sub_arg[len(name):])) + + raise ValueError('Unknown linker driver argument: %s' % (arg, )) + + def set_install_name_tool_path(self, install_name_tool_path): + """Linker driver action for -Wcrl,installnametoolpath,. + + Sets the invocation command for install_name_tool, which allows the + caller to specify an alternate path. This action is always + processed before the run_install_name_tool action. + + Args: + install_name_tool_path: string, The path to the install_name_tool + binary to run + + Returns: + No output - this step is run purely for its side-effect.
+ """ + self._install_name_tool_cmd = [install_name_tool_path] + return [] + + def run_install_name_tool(self, args_string): + """Linker driver action for -Wcrl,installnametool,. Invokes + install_name_tool on the linker's output. + + Args: + args_string: string, Comma-separated arguments for + `install_name_tool`. + + Returns: + No output - this step is run purely for its side-effect. + """ + command = list(self._install_name_tool_cmd) + command.extend(args_string.split(',')) + command.append(self._get_linker_output()) + subprocess.check_call(command) + return [] + + def run_dsymutil(self, dsym_path_prefix): + """Linker driver action for -Wcrl,dsym,. Invokes + dsymutil on the linker's output and produces a dsym file at |dsym_file| + path. + + Args: + dsym_path_prefix: string, The path at which the dsymutil output + should be located. + + Returns: + list of string, Build step outputs. + """ + if not len(dsym_path_prefix): + raise ValueError('Unspecified dSYM output file') + + linker_output = self._get_linker_output() + base = os.path.basename(linker_output) + dsym_out = os.path.join(dsym_path_prefix, base + '.dSYM') + + # Remove old dSYMs before invoking dsymutil. + _remove_path(dsym_out) + + tools_paths = _find_tools_paths(self._args) + if os.environ.get('PATH'): + tools_paths.append(os.environ['PATH']) + dsymutil_env = os.environ.copy() + dsymutil_env['PATH'] = ':'.join(tools_paths) + subprocess.check_call(self._dsymutil_cmd + + ['-o', dsym_out, linker_output], + env=dsymutil_env) + return [dsym_out] + + def set_dsymutil_path(self, dsymutil_path): + """Linker driver action for -Wcrl,dsymutilpath,. + + Sets the invocation command for dsymutil, which allows the caller to + specify an alternate dsymutil. This action is always processed before + the RunDsymUtil action. + + Args: + dsymutil_path: string, The path to the dsymutil binary to run + + Returns: + No output - this step is run purely for its side-effect. + """ + self._dsymutil_cmd = [dsymutil_path] + return [] + + def run_save_unstripped(self, unstripped_path_prefix): + """Linker driver action for -Wcrl,unstripped,. + Copies the linker output to |unstripped_path_prefix| before stripping. + + Args: + unstripped_path_prefix: string, The path at which the unstripped + output should be located. + + Returns: + list of string, Build step outputs. + """ + if not len(unstripped_path_prefix): + raise ValueError('Unspecified unstripped output file') + + base = os.path.basename(self._get_linker_output()) + unstripped_out = os.path.join(unstripped_path_prefix, + base + '.unstripped') + + shutil.copyfile(self._get_linker_output(), unstripped_out) + return [unstripped_out] + + def run_strip(self, strip_args_string): + """Linker driver action for -Wcrl,strip,. + + Args: + strip_args_string: string, Comma-separated arguments for `strip`. + + Returns: + list of string, Build step outputs. + """ + strip_command = list(self._strip_cmd) + if len(strip_args_string) > 0: + strip_command += strip_args_string.split(',') + strip_command.append(self._get_linker_output()) + subprocess.check_call(strip_command) + return [] + + def set_strip_path(self, strip_path): + """Linker driver action for -Wcrl,strippath,. + + Sets the invocation command for strip, which allows the caller to + specify an alternate strip. This action is always processed before the + RunStrip action. + + Args: + strip_path: string, The path to the strip binary to run + + Returns: + No output - this step is run purely for its side-effect. 
+ """ + self._strip_cmd = [strip_path] + return [] + + +def _find_tools_paths(full_args): + """Finds all paths where the script should look for additional tools.""" + paths = [] + for idx, arg in enumerate(full_args): + if arg in ['-B', '--prefix']: + paths.append(full_args[idx + 1]) + elif arg.startswith('-B'): + paths.append(arg[2:]) + elif arg.startswith('--prefix='): + paths.append(arg[9:]) + return paths + + +def _remove_path(path): + """Removes the file or directory at |path| if it exists.""" + if os.path.exists(path): + if os.path.isdir(path): + shutil.rmtree(path) + else: + os.unlink(path) + if __name__ == '__main__': - Main(sys.argv) - sys.exit(0) + LinkerDriver(sys.argv).run() + sys.exit(0) diff --git a/build/toolchain/apple/toolchain.gni b/build/toolchain/apple/toolchain.gni index 80fa7b37572c..a3803e157d0b 100644 --- a/build/toolchain/apple/toolchain.gni +++ b/build/toolchain/apple/toolchain.gni @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -10,25 +10,53 @@ import("//build/config/apple/symbols.gni") import("//build/config/clang/clang.gni") import("//build/config/compiler/compiler.gni") import("//build/config/coverage/coverage.gni") +import("//build/config/rust.gni") import("//build/toolchain/cc_wrapper.gni") import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") import("//build/toolchain/toolchain.gni") +import("//build_overrides/build.gni") + +# TODO(crbug.com/1370527): This import is required to detect whether the +# build is for the catalyst environment in order to disable the hermetic +# swift compiler (as it does not include support for catalyst). Remove it +# once the support is available. +if (is_ios) { + import("//build/config/ios/config.gni") +} assert((target_os == "ios" && host_os == "mac") || host_os != "win") declare_args() { - # This makes the linker set timestamps in Mach-O files to 0. This isn't - # enabled by default because this breaks Xcode's lldb. This has been fixed in - # https://reviews.llvm.org/rL368199 but that has not yet made it into a public - # lldb release. - mac_deterministic_build = false - # This controls whether whole module optimization is enabled when building # Swift modules. If enabled, the compiler will compile the module as one # unit, generating just one single object file. Otherwise, it will generate # one object file per .swift file. If unspecified, will default to "true" # for official builds, and "false" for all other builds. swift_whole_module_optimization = -1 + + # If unspecified, will use the toolchain downloaded via deps. + swift_toolchain_path = -1 +} + +# TODO(crbug.com/1370527): Remove this and replace with `build_with_chromium` +# once the support for catalyst is available in the hermetic swift compiler. +_can_use_hermetic_swift = + build_with_chromium && is_ios && target_environment != "catalyst" + +if (swift_toolchain_path == -1) { + if (_can_use_hermetic_swift) { + # Version of the hermetic compiler. Needs to be updated when a new version of + # the compiler is rolled to ensure that all outputs are regenerated. It must + # be kept in sync with the `version` of `third_party/swift-toolchain` in + # //DEPS. + swiftc_version = "swift-5.7-release" + + # Use the hermetic swift toolchain. 
+ swift_toolchain_path = "//third_party/swift-toolchain/" + } else { + swift_toolchain_path = "" + } } if (swift_whole_module_optimization == -1) { @@ -57,8 +85,11 @@ tool_versions = "trim scope") # Shared toolchain definition. Invocations should set current_os to set the -# build args in this definition. -template("apple_toolchain") { +# build args in this definition. This is titled "single_apple_toolchain" +# because it makes exactly one toolchain. Callers will normally want to +# invoke "apple_toolchain" instead, which may make an additional toolchain +# without sanitizers. +template("single_apple_toolchain") { toolchain(target_name) { # When invoking this toolchain not as the default one, these args will be # passed to the build. They are ignored when this is the default toolchain. @@ -73,12 +104,21 @@ template("apple_toolchain") { # ensure that it's always the same, regardless of the values that may be # set on those toolchains. host_toolchain = host_toolchain + + # Similarly for the host toolchain which can be used to make .dylibs + # that will successfully load into prebuilt tools. + host_toolchain_no_sanitizers = host_toolchain_no_sanitizers } # When the invoker has explicitly overridden use_goma or cc_wrapper in the # toolchain args, use those values, otherwise default to the global one. # This works because the only reasonable override that toolchains might # supply for these values are to force-disable them. + if (defined(toolchain_args.use_remoteexec)) { + toolchain_uses_remoteexec = toolchain_args.use_remoteexec + } else { + toolchain_uses_remoteexec = use_remoteexec + } if (defined(toolchain_args.use_goma)) { toolchain_uses_goma = toolchain_args.use_goma } else { @@ -89,6 +129,34 @@ } else { toolchain_cc_wrapper = cc_wrapper } + assert(!(toolchain_uses_remoteexec && toolchain_uses_goma), + "Goma and re-client can't be used together.") + assert(!(toolchain_cc_wrapper != "" && toolchain_uses_remoteexec), + "re-client and cc_wrapper can't be used together.") + assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma), + "Goma and cc_wrapper can't be used together.") + + if (defined(toolchain_args.use_lld)) { + toolchain_uses_lld = toolchain_args.use_lld + } else { + toolchain_uses_lld = use_lld + } + + # The value of all global variables (such as `is_component_build`) is the + # one from the default toolchain when evaluating a secondary toolchain + # (see https://crbug.com/gn/286). This means that the value may change when + # evaluating target/configs in the new toolchain if the variable default + # value depends on variables set in `toolchain_args`. + # + # For this reason, "ios" needs to override `is_component_build` as its + # default value depends on `current_os`. Use the overridden value if it + # is set in `toolchain_args`. + if (defined(toolchain_args.is_component_build)) { + toolchain_is_component_build = toolchain_args.is_component_build + } else { + toolchain_is_component_build = is_component_build + } + if (defined(toolchain_args.use_xcode_clang)) { toolchain_uses_xcode_clang = toolchain_args.use_xcode_clang } else { @@ -109,10 +177,22 @@ template("apple_toolchain") { swiftmodule_switch = "-Wl,-add_ast_path," # Compute the compiler prefix.
- if (toolchain_uses_goma) { + if (toolchain_uses_remoteexec) { + if (defined(toolchain_args.rbe_cc_cfg_file)) { + toolchain_rbe_cc_cfg_file = toolchain_args.rbe_cc_cfg_file + } else { + toolchain_rbe_cc_cfg_file = rbe_cc_cfg_file + } + + # C/C++ (clang) rewrapper prefix to use when use_remoteexec is true. + compiler_prefix = "${rbe_bin_dir}/rewrapper -cfg=${toolchain_rbe_cc_cfg_file} -exec_root=${rbe_exec_root} " + } else if (toolchain_uses_goma) { assert(toolchain_cc_wrapper == "", "Goma and cc_wrapper can't be used together.") compiler_prefix = "$goma_dir/gomacc " + if (use_goma_rust) { + rust_compiler_prefix = compiler_prefix + } } else if (toolchain_cc_wrapper != "") { compiler_prefix = toolchain_cc_wrapper + " " } else { @@ -125,7 +205,7 @@ template("apple_toolchain") { # Set the explicit search path for clang++ so it uses the right linker # binary. - if (!use_lld) { + if (!toolchain_uses_lld) { ld += " -B " + invoker.bin_path } @@ -154,23 +234,10 @@ template("apple_toolchain") { # Specify an explicit path for the strip binary. _strippath = invoker.bin_path + "strip" - linker_driver += " -Wcrl,strippath," + _strippath - - if (mac_deterministic_build) { - linker_driver += " --deterministic" - } - - # On iOS, the final applications are assembled using lipo (to support fat - # builds). The correct flags are passed to the linker_driver.py script - # directly during the lipo call. The test is against the target_os because - # there is no need to create .dSYMs for targets compiled for the host. - if (defined(invoker.strip_with_lipo) && invoker.strip_with_lipo) { - _enable_dsyms = false - _save_unstripped_output = false - } else { - _enable_dsyms = enable_dsyms - _save_unstripped_output = save_unstripped_output - } + _installnametoolpath = invoker.bin_path + "install_name_tool" + linker_driver += " -Wcrl,strippath,${_strippath} -Wcrl,installnametoolpath,${_installnametoolpath}" + _enable_dsyms = enable_dsyms + _save_unstripped_output = save_unstripped_output # Make these apply to all tools below. lib_switch = "-l" @@ -203,6 +270,144 @@ template("apple_toolchain") { _unstripped_output = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.unstripped" } + if (toolchain_has_rust) { + if (!defined(rust_compiler_prefix)) { + rust_compiler_prefix = "" + } + rustc_bin = rebase_path("${rust_sysroot}/bin/rustc", root_build_dir) + rustc = "$rust_compiler_prefix${rustc_bin}" + rust_sysroot_relative_to_out = rebase_path(rust_sysroot, root_out_dir) + rustc_wrapper = rebase_path("//build/rust/rustc_wrapper.py") + + tool("rust_staticlib") { + libname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$libname.rsp" + depfile = "$libname.d" + + default_output_extension = ".a" + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST(STATICLIB) {{output}}" + outputs = [ libname ] + + # TODO(danakj): When `!toolchain_uses_lld` do we need to specify a path + # to libtool like the "alink" rule? + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"$_cxx\" $rustc_common_args --emit=dep-info=$depfile,link -o $libname LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_rlib") { + # We must always prefix with `lib` even if the library already starts + # with that prefix or else our stdlib is unable to find libc.rlib (or + # actually liblibc.rlib). 
+ rlibname = + "{{output_dir}}/lib{{target_output_name}}{{output_extension}}" + depfile = "$rlibname.d" + + # Do not use rsp files in this (common) case because they occupy the + # ninja main thread, and {{rlibs}} have shorter command lines than + # fully linked targets. + + default_output_extension = ".rlib" + + # This is prefixed unconditionally in `rlibname`. + # output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST {{output}}" + outputs = [ rlibname ] + + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- -Clinker=\"$_cxx\" $rustc_common_args {{rustdeps}} {{externs}} --emit=dep-info=$depfile,link -o $rlibname LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_bin") { + exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$exename.rsp" + depfile = "$exename.d" + pool = "//build/toolchain:link_pool($default_toolchain)" + + # TODO(danakj): solink can generate TOC files for re-exporting library + # symbols, and we should do the same here. + + default_output_dir = "{{root_out_dir}}" + description = "RUST(BIN) {{output}}" + outputs = [ exename ] + + # TODO(danakj): Support dsym_switch like C++ targets. + # link_command += dsym_switch + # if (_enable_dsyms) { + # outputs += dsym_output + # } + # if (_save_unstripped_output) { + # outputs += [ _unstripped_output ] + # } + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $exename LDFLAGS {{ldflags}} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_cdylib") { + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$dllname.rsp" + depfile = "$dllname.d" + pool = "//build/toolchain:link_pool($default_toolchain)" + + # TODO(danakj): solink can generate TOC files for re-exporting library + # symbols, and we should do the same here. + + default_output_extension = ".dylib" + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST(CDYLIB) {{output}}" + outputs = [ dllname ] + + # TODO(danakj): Support dsym_switch like C++ targets. + # link_command += dsym_switch + # if (_enable_dsyms) { + # outputs += dsym_output + # } + # if (_save_unstripped_output) { + # outputs += [ _unstripped_output ] + # } + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_macro") { + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$dllname.rsp" + depfile = "$dllname.d" + pool = "//build/toolchain:link_pool($default_toolchain)" + + # TODO(danakj): solink can generate TOC files for re-exporting library + # symbols, and we should do the same here. + + default_output_extension = ".dylib" + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST(MACRO) {{output}}" + outputs = [ dllname ] + + # TODO(danakj): Support dsym_switch like C++ targets. 
+ # link_command += dsym_switch + # if (_enable_dsyms) { + # outputs += dsym_output + # } + # if (_save_unstripped_output) { + # outputs += [ _unstripped_output ] + # } + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + } + tool("cc") { depfile = "{{output}}.d" precompiled_header_type = "gcc" @@ -250,30 +455,19 @@ template("apple_toolchain") { tool("alink") { rspfile = "{{output}}.rsp" + rspfile_content = "{{inputs}}" - if (!use_lld) { - # Note about -filelist: Apple's linker reads the file list file and - # interprets each newline-separated chunk of text as a file name. It - # doesn't do the things one would expect from the shell like unescaping - # or handling quotes. In contrast, when Ninja finds a file name with - # spaces, it single-quotes them in $inputs_newline as it would normally - # do for command-line arguments. Thus any source names with spaces, or - # label names with spaces (which GN bases the output paths on) will be - # corrupted by this process. Don't use spaces for source files or - # labels. - rspfile_content = "{{inputs_newline}}" - + if (!toolchain_uses_lld) { script = rebase_path("//build/toolchain/apple/filter_libtool.py", root_build_dir) # Specify explicit path for libtool. libtool = invoker.bin_path + "libtool" - command = "rm -f {{output}} && TOOL_VERSION=${tool_versions.filter_libtool} $python_path $script $libtool -static -D {{arflags}} -o {{output}} -filelist $rspfile" + command = "rm -f {{output}} && TOOL_VERSION=${tool_versions.filter_libtool} $python_path $script $libtool -static -D {{arflags}} -o {{output}} @$rspfile" description = "LIBTOOL-STATIC {{output}}" } else { - rspfile_content = "{{inputs}}" ar = "${prefix}llvm-ar" - command = "\"$ar\" {{arflags}} -r -c -s -D {{output}} \"@$rspfile\"" + command = "\"$ar\" {{arflags}} -r -c -s -D {{output}} @$rspfile" # Remove the output file first so that ar doesn't try to modify the # existing file. @@ -318,18 +512,18 @@ template("apple_toolchain") { does_reexport_command = "[ ! -e \"$dylib\" -o ! -e \"$tocname\" ] || $otool -l \"$dylib\" | grep -q LC_REEXPORT_DYLIB" link_command = "$linker_driver $ld -shared " - if (is_component_build) { + if (toolchain_is_component_build) { link_command += " -Wl,-install_name,@rpath/\"{{target_output_name}}{{output_extension}}\" " } link_command += dsym_switch - link_command += "{{ldflags}} -o \"$dylib\" -Wl,-filelist,\"$rspfile\" {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}}" + link_command += "{{ldflags}} -o \"$dylib\" \"@$rspfile\"" replace_command = "if ! 
cmp -s \"$temporary_tocname\" \"$tocname\"; then mv \"$temporary_tocname\" \"$tocname\"" extract_toc_command = "{ $otool -l \"$dylib\" | grep LC_ID_DYLIB -A 5; $nm -gPp \"$dylib\" | cut -f1-2 -d' ' | grep -v U\$\$; true; }" command = "if $does_reexport_command ; then $link_command && $extract_toc_command > \"$tocname\"; else $link_command && $extract_toc_command > \"$temporary_tocname\" && $replace_command ; fi; fi" - rspfile_content = "{{inputs_newline}}" + rspfile_content = "{{inputs}} {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}" description = "SOLINK {{output}}" @@ -369,15 +563,12 @@ template("apple_toolchain") { rspfile = sofile + ".rsp" pool = "//build/toolchain:link_pool($default_toolchain)" - link_command = "$linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" -Wl,-filelist,\"$rspfile\"" - if (is_component_build) { - link_command += " -Wl,-install_name,@rpath/{{target_output_name}}{{output_extension}}" - } + link_command = + "$linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" \"@$rspfile\"" link_command += dsym_switch - link_command += " {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}}" command = link_command - rspfile_content = "{{inputs_newline}}" + rspfile_content = "{{inputs}} {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}" description = "SOLINK_MODULE {{output}}" @@ -402,17 +593,9 @@ template("apple_toolchain") { rspfile = "$outfile.rsp" pool = "//build/toolchain:link_pool($default_toolchain)" - # Note about -filelist: Apple's linker reads the file list file and - # interprets each newline-separated chunk of text as a file name. It - # doesn't do the things one would expect from the shell like unescaping - # or handling quotes. In contrast, when Ninja finds a file name with - # spaces, it single-quotes them in $inputs_newline as it would normally - # do for command-line arguments. Thus any source names with spaces, or - # label names with spaces (which GN bases the output paths on) will be - # corrupted by this process. Don't use spaces for source files or labels. - command = "$linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" -Wl,-filelist,\"$rspfile\" {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}}" + command = "$linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" \"@$rspfile\"" description = "LINK $outfile" - rspfile_content = "{{inputs_newline}}" + rspfile_content = "{{inputs}} {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}" outputs = [ outfile ] if (_enable_dsyms) { @@ -449,7 +632,7 @@ template("apple_toolchain") { if (host_os == "mac") { command = "rm -rf {{output}} && /bin/cp -Rc {{source}} {{output}}" } else { - command = "rm -rf {{output}} && /bin/cp -Rl {{source}} {{output}}" + command = "rm -rf {{output}} && /bin/cp -Rld {{source}} {{output}}" } description = "COPY_BUNDLE_DATA {{source}} {{output}}" pool = "//build/toolchain/apple:bundle_pool($default_toolchain)" @@ -471,18 +654,21 @@ template("apple_toolchain") { # order. "{{target_gen_dir}}/{{module_name}}.swiftmodule", - "{{target_gen_dir}}/{{module_name}}.h", + "{{target_gen_dir}}/{{target_output_name}}.h", "{{target_gen_dir}}/{{module_name}}.swiftdoc", "{{target_gen_dir}}/{{module_name}}.swiftsourceinfo", ] + # Additional flags passed to the wrapper script but that are only + # set conditionally. 
+ _extra_flags = "" + if (swift_whole_module_optimization) { - _extra_flags = "-whole-module-optimization" + _extra_flags += " -whole-module-optimization" _objects_dir = "{{target_out_dir}}" outputs += [ "$_objects_dir/{{module_name}}.o" ] } else { - _extra_flags = "" _objects_dir = "{{target_out_dir}}/{{label_name}}" partial_outputs = [ "$_objects_dir/{{source_name_part}}.o" ] @@ -493,13 +679,95 @@ template("apple_toolchain") { _env_vars += " DEVELOPER_DIR=${toolchain_args.sdk_developer_dir}" } + + # Starting with version 5.6, the Swift compiler always + generates precompiled headers. In earlier versions, they were + generated only when bridging headers and whole module optimisation + were enabled, and generation could be disabled with the parameter + `-disable-bridging-pch`. + # + # The precompiled headers are binary files (i.e. they are not + regular Objective-C header files and cannot be loaded as such). + # + # There is a hidden requirement that the compiler needs to + be told where to save those .pch files (via the parameter + `-pch-output-dir $dir`). If this parameter is not passed, the + compiler will silently write them at an incorrect location, + leading later passes to try to load those .pch files as either + regular header files (.h) or object files (.o) and causing + compilation failures. + # + # List the directory where the precompiled header is generated + as an output, but do not list the .pch file itself. This is + because the name includes two hashes (one corresponding to + the compiler revision, and the other probably derived from + the module itself) that are difficult to generate. + # + # Still, we want to avoid creating a directory that has the same + name as a file generated by another rule, so explicitly list + the directory in `outputs` so that gn can warn if it conflicts + with another output file. + + _pch_output_dir = "{{target_out_dir}}/{{module_name}}:pch/" + outputs += [ _pch_output_dir ] + + # Include the version of the compiler on the command-line. This causes + `ninja` to consider all the compilation outputs to be dirty when the + version changes. + if (defined(swiftc_version)) { + _extra_flags += " -swiftc-version $swiftc_version" + } + + # Include the version of Xcode on the command-line (if specified via + toolchain_args). This causes `ninja` to consider all the compilation + outputs to be dirty when the version changes. + # + # This is required because module dependencies sometimes change between + different versions of Xcode (e.g. when moving from Xcode 14 beta 6 to + Xcode 14 RC). If the swiftmodules are not rebuilt when the version + changes, they may encode dependencies on now non-existent frameworks, + ultimately causing linker failures. + if (defined(toolchain_args.xcode_build)) { + _extra_flags += " -xcode-version ${toolchain_args.xcode_build}" + } + + if (swift_toolchain_path != "") { + _extra_flags += " -swift-toolchain-path " + + rebase_path(swift_toolchain_path, root_build_dir) + } + + # The Swift compiler assumes that the generated header will be used by + Objective-C code compiled with module support enabled (-fmodules). + # + # The import looks like this in the generated header: + # + # #if __has_feature(modules) + # @import UIKit; + # #endif + # + # As Chromium code is compiled without support for modules (i.e.
the + # code is compiled without `-fmodules`), the dependent modules are not + # imported from the generated header, which causes compilation failure + # if the client code does not first import the required modules (see + # https://crbug.com/1316061 for details). + # + # Secondly, clang ToT always returns `1` when `__has_features(modules)` + # is evaluated, even if building with `-fno-modules` when building with + # `-std=c++20` (see https://crbug.com/1284275 for details). This causes + # the `@import` lines to be reached and the build to fail (since the + # support for modules is not enabled). + # + # Instruct swiftc.py to rewrite the generated header to use the old + # import pre-processor instructions (#import ) to work + # around those two issues. + _extra_flags += " -fix-module-imports" + command = "$_env_vars $python_path $_tool -module-name {{module_name}} " + - "-object-dir $_objects_dir " + + "-root-dir " + rebase_path("//", root_build_dir) + " " + + "-object-dir $_objects_dir -pch-output-dir $_pch_output_dir " + "-module-path {{target_gen_dir}}/{{module_name}}.swiftmodule " + - "-header-path {{target_gen_dir}}/{{module_name}}.h " + + "-header-path {{target_gen_dir}}/{{target_output_name}}.h " + "-depfile {{target_out_dir}}/{{module_name}}.d " + - "-depfile-filter {{target_gen_dir}}/{{module_name}}.swiftmodule " + "-bridge-header {{bridge_header}} $_extra_flags " + "{{swiftflags}} {{include_dirs}} {{module_dirs}} {{inputs}}" } @@ -519,10 +787,12 @@ template("apple_toolchain") { } command = - "$_env_vars $python_path $_tool -p \"${invoker.sdk_name}\" " + - "-t \"${invoker.deployment_target}\" " + - "-T \"{{bundle_product_type}}\" " + - "-P \"{{bundle_partial_info_plist}}\" " + "-o {{output}} {{inputs}}" + "$_env_vars $python_path $_tool " + + "-p '${toolchain_args.current_os}' " + + "-e '${invoker.target_environment}' " + + "-t '${invoker.deployment_target}' " + + "-T '{{bundle_product_type}}' " + + "-P '{{bundle_partial_info_plist}}' " + "-o {{output}} {{inputs}}" description = "COMPILE_XCASSETS {{output}}" pool = "//build/toolchain/apple:bundle_pool($default_toolchain)" @@ -534,3 +804,41 @@ template("apple_toolchain") { } } } + +# Makes a single Apple toolchain, or possibly two if we need a +# sanitizer-free equivalent. +template("apple_toolchain") { + single_apple_toolchain(target_name) { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + forward_variables_from(invoker, + "*", + [ + "visibility", + "test_only", + ]) + + # No need to forward visibility and test_only as they apply to targets not + # toolchains, but presubmit checks require that we explicitly exclude them + } + + if (using_sanitizer) { + # Make an additional toolchain with no sanitizers. + single_apple_toolchain("${target_name}_no_sanitizers") { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + forward_variables_from(invoker, + "*", + [ + "toolchain_args", + "visibility", + "test_only", + ]) + toolchain_args = { + # Populate toolchain args from the invoker. + forward_variables_from(invoker.toolchain_args, "*") + toolchain_disables_sanitizers = true + } + } + } +} diff --git a/build/toolchain/cc_wrapper.gni b/build/toolchain/cc_wrapper.gni index bdb8cea6fc0a..577a1fbef63e 100644 --- a/build/toolchain/cc_wrapper.gni +++ b/build/toolchain/cc_wrapper.gni @@ -1,4 +1,4 @@ -# Copyright (c) 2014 The Chromium Authors. All rights reserved. 
+# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -79,5 +79,5 @@ if (is_starboard && cc_wrapper == "" && enable_cc_wrapper) { assert(!use_goma || cc_wrapper == "", "use_goma and cc_wrapper can not be used together.") -assert(!use_rbe || cc_wrapper == "", - "use_rbe and cc_wrapper can not be used together.") +assert(!use_remoteexec || cc_wrapper == "", + "use_remoteexec and cc_wrapper can not be used together.") diff --git a/build/toolchain/clang_code_coverage_wrapper.py b/build/toolchain/clang_code_coverage_wrapper.py index 7bd922295c35..5c9090114013 100755 --- a/build/toolchain/clang_code_coverage_wrapper.py +++ b/build/toolchain/clang_code_coverage_wrapper.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2018 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Removes code coverage flags from invocations of the Clang C/C++ compiler. @@ -46,7 +46,6 @@ --files-to-instrument=coverage_instrumentation_input.txt """ -from __future__ import print_function import argparse import os @@ -97,8 +96,8 @@ # shouldn't. # TODO(crbug.com/990948): Remove when the bug is fixed. '../../chrome/browser/media/router/providers/cast/cast_internal_message_util.cc', #pylint: disable=line-too-long - '../../components/cast_channel/cast_channel_enum.cc', - '../../components/cast_channel/cast_message_util.cc', + '../../components/media_router/common/providers/cast/channel/cast_channel_enum.cc', #pylint: disable=line-too-long + '../../components/media_router/common/providers/cast/channel/cast_message_util.cc', #pylint: disable=line-too-long '../../components/media_router/common/providers/cast/cast_media_source.cc', #pylint: disable=line-too-long '../../ui/events/keycodes/dom/keycode_converter.cc', # TODO(crbug.com/1051561): angle_unittests affected by coverage. @@ -215,6 +214,9 @@ def main(): # correct separator for the current platform (i.e. '\' on Windows and '/' # otherwise). compile_source_file = os.path.normpath(compile_command[source_flag_index + 1]) + extension = os.path.splitext(compile_source_file)[1] + if not extension in ['.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.S']: + raise Exception('Invalid source file %s found' % compile_source_file) exclusion_list = _COVERAGE_EXCLUSION_LIST_MAP.get( target_os, _DEFAULT_COVERAGE_EXCLUSION_LIST) force_list = _COVERAGE_FORCE_LIST_MAP.get(target_os, []) diff --git a/build/toolchain/concurrent_links.gni b/build/toolchain/concurrent_links.gni index e074dfdfa6f4..a10a5cd526fc 100644 --- a/build/toolchain/concurrent_links.gni +++ b/build/toolchain/concurrent_links.gni @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -36,7 +36,7 @@ if (concurrent_links == "") { # a little padding to account for future growth. _args += [ "--mem_per_link_gb=45" ] } else { - _args += [ "--mem_per_link_gb=10" ] + _args += [ "--mem_per_link_gb=20" ] } } else if ((use_clang_coverage && # When coverage_instrumentation_input_file is not empty it means @@ -61,8 +61,7 @@ if (concurrent_links == "") { } else if (is_android && !is_component_build && symbol_level == 2) { # Full debug symbols require large memory for link. 
_args = [ "--mem_per_link_gb=25" ] - } else if (is_android && !is_debug && !using_sanitizer && is_java_debug && - disable_android_lint && symbol_level < 2) { + } else if (is_android && !is_debug && !using_sanitizer && symbol_level < 2) { if (symbol_level == 1) { _args = [ "--mem_per_link_gb=6" ] } else { @@ -71,6 +70,18 @@ if (concurrent_links == "") { } else if ((is_linux || is_chromeos_lacros) && symbol_level == 0) { # Memory consumption on link without debug symbols is low on linux. _args = [ "--mem_per_link_gb=3" ] + } else if (current_os == "zos") { + _args = [ "--mem_per_link_gb=1" ] + } else if (is_fuchsia) { + # TODO(crbug.com/1347159): This was defaulting to 8GB. The number of + # linker instances to run in parallel is calculated by dividing + # the available memory by this value. On a 32GB machine with + # roughly 29GB of available memory, this would cause three instances + # to run. This started running out of memory and thrashing. This change + # addresses that issue to get the SDK rollers running again but + # could be optimized (maybe to 12GB or for different configs like + # component build). + _args = [ "--mem_per_link_gb=16" ] } else { _args = [] } @@ -101,10 +112,6 @@ if (concurrent_links == "") { } } else { assert(!use_thin_lto, "can't explicitly set concurrent_links with thinlto") - - # Convert the value to a number if it's a string. - concurrent_links = - exec_script("//starboard/build/echo.py", [ concurrent_links ], "value") concurrent_links_logs = [ "concurrent_links set by GN arg (value=$concurrent_links)" ] } diff --git a/build/toolchain/cros/BUILD.gn b/build/toolchain/cros/BUILD.gn index 6d136783d94a..c815e1ab2844 100644 --- a/build/toolchain/cros/BUILD.gn +++ b/build/toolchain/cros/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,70 +7,88 @@ import("//build/config/sysroot.gni") import("//build/toolchain/cros_toolchain.gni") import("//build/toolchain/gcc_toolchain.gni") +declare_args() { + # If set, build lacros with Chromium's toolchain instead of with Chrome OS's. + # TODO(thakis): Set this to `= chromeos_is_browser_only` once that works. + lacros_use_chromium_toolchain = false +} + # This is mostly identical to gcc_toolchain, but handles relativizing toolchain # paths. This is needed for CrOS since these paths often change based on the # environment. For example, cxx is a relative path picked up on $PATH in the # chroot. But in Simple Chrome, cxx is a system-absolute path. template("cros_toolchain") { - gcc_toolchain(target_name) { - forward_variables_from(invoker, "*") - - # CrOS's target toolchain wrapper prefers to invoke gomacc itself, so pass - # it the gomacc path via cmd-line arg. Otherwise, for both CrOS's host - # wrapper (used in the ebuild) and Chrome's clang (used in Simple Chrome), - # prepend gomacc like normal. - if (use_goma && toolchain_args.needs_gomacc_path_arg) { - extra_cppflags += " --gomacc-path $goma_dir/gomacc" + if (lacros_use_chromium_toolchain) { + clang_toolchain(target_name) { + forward_variables_from(invoker, "*") } + } else { + gcc_toolchain(target_name) { + forward_variables_from(invoker, "*") - # Relativize path if compiler is specified such that not to lookup from $PATH - # and cc/cxx does not contain additional flags.
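The relativization condition that this hunk rewraps is easy to misread; a rough Python rendering of the same predicate (illustrative only, not part of the build) is:

    import os

    def maybe_relativize(tool, root_build_dir):
        # Rebase only when `tool` has a directory component (i.e. it is not a
        # bare name looked up on $PATH) and contains no spaces (no embedded
        # flags).
        if tool != os.path.basename(tool) and ' ' not in tool:
            return os.path.relpath(tool, root_build_dir)
        return tool

GN's `get_path_info(cc, "file")` plays the role of `os.path.basename`, and `string_replace(cc, " ", "") == cc` is the no-spaces check.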
- if (cc != get_path_info(cc, "file") && string_replace(cc, " ", "") == cc) { - cc = rebase_path(cc, root_build_dir) - } - if (cxx != get_path_info(cxx, "file") && - string_replace(cxx, " ", "") == cxx) { - cxx = rebase_path(cxx, root_build_dir) - } - if (ar != get_path_info(ar, "file") && string_replace(ar, " ", "") == ar) { - ar = rebase_path(ar, root_build_dir) - } - if (ld != get_path_info(ld, "file") && string_replace(ld, " ", "") == ld) { - ld = rebase_path(ld, root_build_dir) + toolchain_args.cc_wrapper = "" + toolchain_args.clang_use_chrome_plugins = false + + # CrOS's target toolchain wrapper prefers to invoke gomacc itself, so pass + # it the gomacc path via cmd-line arg. Otherwise, for both CrOS's host + # wrapper (used in the ebuild) and Chrome's clang (used in Simple Chrome), + # prepend gomacc like normal. + if (use_goma && toolchain_args.needs_gomacc_path_arg) { + extra_cppflags += " --gomacc-path $goma_dir/gomacc" + } + if (use_remoteexec && toolchain_args.needs_gomacc_path_arg) { + extra_cppflags += " --rewrapper-path $rbe_cros_cc_wrapper --rewrapper-cfg ${rbe_cc_cfg_file}" + } + + # Relativize the compiler path if one is given explicitly (so it is not + # looked up from $PATH) and cc/cxx contains no additional flags. + if (cc != get_path_info(cc, "file") && + string_replace(cc, " ", "") == cc) { + cc = rebase_path(cc, root_build_dir) + } + if (cxx != get_path_info(cxx, "file") && + string_replace(cxx, " ", "") == cxx) { + cxx = rebase_path(cxx, root_build_dir) + } + if (ar != get_path_info(ar, "file") && + string_replace(ar, " ", "") == ar) { + ar = rebase_path(ar, root_build_dir) + } + if (ld != get_path_info(ld, "file") && + string_replace(ld, " ", "") == ld) { + ld = rebase_path(ld, root_build_dir) + } } } } # This is the normal toolchain for most targets. cros_toolchain("target") { - ar = cros_target_ar - cc = cros_target_cc - cxx = cros_target_cxx - ld = cros_target_ld - - if (cros_target_nm != "") { - nm = cros_target_nm - } - if (cros_target_readelf != "") { - readelf = cros_target_readelf - } - extra_cflags = cros_target_extra_cflags - extra_cppflags = cros_target_extra_cppflags - extra_cxxflags = cros_target_extra_cxxflags - extra_ldflags = cros_target_extra_ldflags - toolchain_args = { - cc_wrapper = "" - needs_gomacc_path_arg = true - clang_use_chrome_plugins = false current_cpu = target_cpu current_os = "chromeos" - is_clang = is_clang - use_debug_fission = use_debug_fission - use_gold = use_gold - use_sysroot = use_sysroot sysroot = target_sysroot } + + if (!lacros_use_chromium_toolchain) { + ar = cros_target_ar + cc = cros_target_cc + cxx = cros_target_cxx + ld = cros_target_ld + + if (cros_target_nm != "") { + nm = cros_target_nm + } + if (cros_target_readelf != "") { + readelf = cros_target_readelf + } + extra_cflags = cros_target_extra_cflags + extra_cppflags = cros_target_extra_cppflags + extra_cxxflags = cros_target_extra_cxxflags + extra_ldflags = cros_target_extra_ldflags + + toolchain_args.needs_gomacc_path_arg = true + } } # This is a special toolchain needed just for the nacl_bootstrap target in @@ -78,88 +96,115 @@ cros_toolchain("target") { # to ":target" except that it forces use_debug_fission, use_gold, and # use_sysroot off, and allows the user to set different sets of extra flags.
cros_toolchain("nacl_bootstrap") { - ar = cros_target_ar - cc = cros_target_cc - cxx = cros_target_cxx - ld = cros_target_ld + toolchain_args = { + if (target_cpu == "arm64") { + current_cpu = "arm" + } else { + current_cpu = target_cpu + } + current_os = "chromeos" + use_debug_fission = false + use_gold = false + use_sysroot = false + } + + if (!lacros_use_chromium_toolchain) { + ar = cros_target_ar + cc = cros_target_cc + cxx = cros_target_cxx + ld = cros_target_ld - if (cros_target_nm != "") { - nm = cros_target_nm + if (cros_target_nm != "") { + nm = cros_target_nm + } + if (cros_target_readelf != "") { + readelf = cros_target_readelf + } + extra_cflags = cros_nacl_bootstrap_extra_cflags + extra_cppflags = cros_nacl_bootstrap_extra_cppflags + extra_cxxflags = cros_nacl_bootstrap_extra_cxxflags + extra_ldflags = cros_nacl_bootstrap_extra_ldflags + + toolchain_args.needs_gomacc_path_arg = true } - if (cros_target_readelf != "") { - readelf = cros_target_readelf + + # We build for ARM32, even when the rest of the build targets ARM64. + if (target_cpu == "arm64") { + ar = cros_nacl_helper_arm32_ar + cc = cros_nacl_helper_arm32_cc + cxx = cros_nacl_helper_arm32_cxx + ld = cros_nacl_helper_arm32_ld + # Avoid accidental use of Arm64 sysroot because of SYSROOT + # env variable set in ChromeOS builds. + toolchain_args.sysroot = cros_nacl_helper_arm32_sysroot } - extra_cflags = cros_nacl_bootstrap_extra_cflags - extra_cppflags = cros_nacl_bootstrap_extra_cppflags - extra_cxxflags = cros_nacl_bootstrap_extra_cxxflags - extra_ldflags = cros_nacl_bootstrap_extra_ldflags +} +# This is a special toolchain needed just for the nacl_helper target for +# building an Arm32 nacl_helper binary on Arm64 ChromeOS targets. +cros_toolchain("nacl_helper_arm32") { toolchain_args = { - cc_wrapper = "" - needs_gomacc_path_arg = true - clang_use_chrome_plugins = false - current_cpu = target_cpu + current_cpu = "arm" current_os = "chromeos" - is_clang = is_clang use_debug_fission = false use_gold = false - use_sysroot = false - } -} + sysroot = cros_nacl_helper_arm32_sysroot -cros_toolchain("host") { - # These are args for the template. - ar = cros_host_ar - cc = cros_host_cc - cxx = cros_host_cxx - ld = cros_host_ld - - if (cros_host_nm != "") { - nm = cros_host_nm + # Disable some uses of libraries that this build does not require. The + # sysroot for this build does not provide them, and they would be pulled in + # by indirect dependencies of nacl_helper otherwise. + use_cras = false + use_nss_certs = false + use_system_libdrm = false + use_system_libsync = false } - if (cros_host_readelf != "") { - readelf = cros_host_readelf + ar = cros_nacl_helper_arm32_ar + cc = cros_nacl_helper_arm32_cc + cxx = cros_nacl_helper_arm32_cxx + ld = cros_nacl_helper_arm32_ld + readelf = cros_nacl_helper_arm32_readelf + + extra_cflags = "" + extra_cppflags = "" + extra_cxxflags = "" + extra_ldflags = "" + + if (!lacros_use_chromium_toolchain) { + toolchain_args.needs_gomacc_path_arg = true } - extra_cflags = cros_host_extra_cflags - extra_cppflags = cros_host_extra_cppflags - extra_cxxflags = cros_host_extra_cxxflags - extra_ldflags = cros_host_extra_ldflags +} +cros_toolchain("host") { toolchain_args = { - cc_wrapper = "" - needs_gomacc_path_arg = false - clang_use_chrome_plugins = false - is_clang = cros_host_is_clang current_cpu = host_cpu current_os = "linux" - use_sysroot = use_sysroot sysroot = cros_host_sysroot } -} -cros_toolchain("v8_snapshot") { - # These are args for the template. 
- ar = cros_v8_snapshot_ar - cc = cros_v8_snapshot_cc - cxx = cros_v8_snapshot_cxx - ld = cros_v8_snapshot_ld - - if (cros_v8_snapshot_nm != "") { - nm = cros_v8_snapshot_nm - } - if (cros_v8_snapshot_readelf != "") { - readelf = cros_v8_snapshot_readelf + if (!lacros_use_chromium_toolchain) { + # These are args for the template. + ar = cros_host_ar + cc = cros_host_cc + cxx = cros_host_cxx + ld = cros_host_ld + + if (cros_host_nm != "") { + nm = cros_host_nm + } + if (cros_host_readelf != "") { + readelf = cros_host_readelf + } + extra_cflags = cros_host_extra_cflags + extra_cppflags = cros_host_extra_cppflags + extra_cxxflags = cros_host_extra_cxxflags + extra_ldflags = cros_host_extra_ldflags + + toolchain_args.needs_gomacc_path_arg = false } - extra_cflags = cros_v8_snapshot_extra_cflags - extra_cppflags = cros_v8_snapshot_extra_cppflags - extra_cxxflags = cros_v8_snapshot_extra_cxxflags - extra_ldflags = cros_v8_snapshot_extra_ldflags +} +cros_toolchain("v8_snapshot") { toolchain_args = { - cc_wrapper = "" - needs_gomacc_path_arg = false - clang_use_chrome_plugins = false - is_clang = cros_v8_snapshot_is_clang if (target_cpu == "x86" || target_cpu == "arm" || target_cpu == "mipsel") { current_cpu = "x86" } else { @@ -167,7 +212,94 @@ cros_toolchain("v8_snapshot") { } v8_current_cpu = v8_target_cpu current_os = "linux" - use_sysroot = use_sysroot sysroot = cros_v8_snapshot_sysroot } + + if (!lacros_use_chromium_toolchain) { + # These are args for the template. + ar = cros_v8_snapshot_ar + cc = cros_v8_snapshot_cc + cxx = cros_v8_snapshot_cxx + ld = cros_v8_snapshot_ld + + if (cros_v8_snapshot_nm != "") { + nm = cros_v8_snapshot_nm + } + if (cros_v8_snapshot_readelf != "") { + readelf = cros_v8_snapshot_readelf + } + extra_cflags = cros_v8_snapshot_extra_cflags + extra_cppflags = cros_v8_snapshot_extra_cppflags + extra_cxxflags = cros_v8_snapshot_extra_cxxflags + extra_ldflags = cros_v8_snapshot_extra_ldflags + + toolchain_args.needs_gomacc_path_arg = false + } +} + +# This toolchain is used when we want to build Lacros using an alternate toolchain. +# To use this, you need to set the gn arg 'also_build_lacros_chrome_for_architecture'. +# See build/config/chromeos/ui_mode.gni +if (also_build_lacros_chrome_for_architecture != "") { + cros_toolchain("lacros_clang") { + if (also_build_lacros_chrome_for_architecture == "amd64") { + lacros_args = + read_file("//build/args/chromeos/amd64-generic-crostoolchain.gni", + "scope") + } else if (also_build_lacros_chrome_for_architecture == "arm") { + lacros_args = + read_file("//build/args/chromeos/arm-generic-crostoolchain.gni", + "scope") + } else { + assert(false, + "also_build_lacros_chrome_for_architecture is not " + + "one of the supported architectures.") + } + + toolchain_args = { + forward_variables_from(lacros_args, "*") + + # TODO(crbug.com/1298821) Change to a better way to set gn args. + # The following gn args are present in ash configs like + # //build/args/chromeos/atlas.gni but not in + # //build/args/chromeos/amd64-generic-crostoolchain.gni. + # So we need to reset them to the default values that Lacros needs. + # Starts from here. + ozone_auto_platforms = true + ozone_platform = "" + ozone_platform_gbm = -1 + ozone_platform_headless = false + + # Ends here. + + current_os = "chromeos" + target_os = "chromeos" + current_cpu = current_cpu + also_build_lacros_chrome_for_architecture = "" + chromeos_is_browser_only = true + use_clang_coverage = false + } + if (!lacros_use_chromium_toolchain) { + # These are args for the template.
+ ar = lacros_args.cros_target_ar + cc = lacros_args.cros_target_cc + cxx = lacros_args.cros_target_cxx + ld = lacros_args.cros_target_ld + + if (defined(lacros_args.cros_target_nm) && + lacros_args.cros_target_nm != "") { + nm = lacros_args.cros_target_nm + } + if (defined(lacros_args.cros_target_readelf) && + lacros_args.cros_target_readelf != "") { + readelf = lacros_args.cros_target_readelf + } + extra_cflags = lacros_args.cros_target_extra_cflags + extra_cppflags = lacros_args.cros_target_extra_cppflags + extra_cxxflags = lacros_args.cros_target_extra_cxxflags + extra_ldflags = lacros_args.cros_target_extra_ldflags + + toolchain_args.needs_gomacc_path_arg = true + } + } } diff --git a/build/toolchain/cros_toolchain.gni b/build/toolchain/cros_toolchain.gni index ccc4db293c1f..a2696bd3f4b6 100644 --- a/build/toolchain/cros_toolchain.gni +++ b/build/toolchain/cros_toolchain.gni @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -50,11 +50,9 @@ declare_args() { cros_target_extra_cxxflags = "" cros_target_extra_ldflags = "" - # is_clang is used instead of cros_target_is_clang cros_host_ar = "${clang_base_path}/bin/llvm-ar" cros_host_cc = "${clang_base_path}/bin/clang" cros_host_cxx = "${clang_base_path}/bin/clang++" - cros_host_is_clang = true cros_host_nm = "" cros_host_readelf = "" cros_host_extra_cflags = "" @@ -66,7 +64,6 @@ declare_args() { cros_v8_snapshot_ar = "${clang_base_path}/bin/llvm-ar" cros_v8_snapshot_cc = "${clang_base_path}/bin/clang" cros_v8_snapshot_cxx = "${clang_base_path}/bin/clang++" - cros_v8_snapshot_is_clang = true cros_v8_snapshot_nm = "" cros_v8_snapshot_readelf = "" cros_v8_snapshot_extra_cflags = "" @@ -79,10 +76,17 @@ declare_args() { cros_nacl_bootstrap_extra_cppflags = "" cros_nacl_bootstrap_extra_cxxflags = "" cros_nacl_bootstrap_extra_ldflags = "" + + cros_nacl_helper_arm32_ar = "ar" + cros_nacl_helper_arm32_cc = "gcc" + cros_nacl_helper_arm32_cxx = "g++" + cros_nacl_helper_arm32_readelf = "" + cros_nacl_helper_arm32_sysroot = "" } declare_args() { cros_target_ld = cros_target_cxx cros_host_ld = cros_host_cxx cros_v8_snapshot_ld = cros_v8_snapshot_cxx + cros_nacl_helper_arm32_ld = cros_nacl_helper_arm32_cxx } diff --git a/build/toolchain/fuchsia/BUILD.gn b/build/toolchain/fuchsia/BUILD.gn index d77640b1ad8c..63504ea70a00 100644 --- a/build/toolchain/fuchsia/BUILD.gn +++ b/build/toolchain/fuchsia/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -12,8 +12,9 @@ template("fuchsia_clang_toolchain") { assert(defined(invoker.toolchain_args), "toolchain_args must be defined for fuchsia_clang_toolchain()") - # We want to build and strip binaries, but retain the unstripped binaries - # in runtime_deps to make them available for isolates. + # While we want to use stripped binaries on the device, we need to retain the + # unstripped binaries in runtime_deps to make them available for the test + # isolates to enable symbolizing on bots.
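The flow described by the comment above, which the `strip` and `use_unstripped_as_runtime_outputs` settings that follow implement, amounts to roughly this sketch (the tool path and file names are illustrative, not the actual build outputs):

    import subprocess

    def strip_for_device(llvm_strip, unstripped, stripped):
        # The unstripped binary stays behind (listed in runtime_deps for
        # symbolization on bots); the device image gets the stripped copy.
        subprocess.check_call([llvm_strip, '-o', stripped, unstripped])

    # strip_for_device('clang/bin/llvm-strip',
    #                  'exe.unstripped/base_unittests', 'base_unittests')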
strip = rebase_path("${clang_base_path}/bin/llvm-strip", root_build_dir) use_unstripped_as_runtime_outputs = true diff --git a/build/toolchain/fuchsia/DIR_METADATA b/build/toolchain/fuchsia/DIR_METADATA index 6d8f079aa581..210aa6a954b8 100644 --- a/build/toolchain/fuchsia/DIR_METADATA +++ b/build/toolchain/fuchsia/DIR_METADATA @@ -1,7 +1 @@ -monorail { - component: "Fuchsia" -} - -team_email: "cr-fuchsia@chromium.org" - -os: FUCHSIA +mixins: "//build/fuchsia/COMMON_METADATA" diff --git a/build/toolchain/fuchsia/OWNERS b/build/toolchain/fuchsia/OWNERS index 3f809e82b19c..e7034eabb1e9 100644 --- a/build/toolchain/fuchsia/OWNERS +++ b/build/toolchain/fuchsia/OWNERS @@ -1 +1 @@ -scottmg@chromium.org +file://build/fuchsia/OWNERS diff --git a/build/toolchain/gcc_link_wrapper.py b/build/toolchain/gcc_link_wrapper.py index b70de8b31c26..5c08a7e4a00c 100755 --- a/build/toolchain/gcc_link_wrapper.py +++ b/build/toolchain/gcc_link_wrapper.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2015 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -68,13 +68,10 @@ def main(): exe_file = args.output if args.unstripped_file: exe_file = args.unstripped_file - # Suppress output here because it doesn't seem to be useful. The most - # common error is a segfault, which will happen if files are missing. - with open(os.devnull, "w") as devnull: - dwp_proc = subprocess.Popen(wrapper_utils.CommandToRun( - [args.dwp, '-e', exe_file, '-o', exe_file + '.dwp']), - stdout=devnull, - stderr=subprocess.STDOUT) + # Suppress warnings about duplicate CU entries (https://crbug.com/1264130) + dwp_proc = subprocess.Popen(wrapper_utils.CommandToRun( + [args.dwp, '-e', exe_file, '-o', exe_file + '.dwp']), + stderr=subprocess.DEVNULL) # Finally, strip the linked executable (if desired). if args.strip: diff --git a/build/toolchain/gcc_solink_wrapper.py b/build/toolchain/gcc_solink_wrapper.py index 39aef4d1e989..03ef042618f5 100755 --- a/build/toolchain/gcc_solink_wrapper.py +++ b/build/toolchain/gcc_solink_wrapper.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2015 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -21,6 +21,8 @@ def CollectSONAME(args): """Replaces: readelf -d $sofile | grep SONAME""" + # TODO(crbug.com/1259067): Come up with a way to get this info without having + # to bundle readelf in the toolchain package. toc = '' readelf = subprocess.Popen(wrapper_utils.CommandToRun( [args.readelf, '-d', args.sofile]), @@ -80,6 +82,13 @@ def InterceptFlag(flag, command): return ret +def SafeDelete(path): + try: + os.unlink(path) + except OSError: + pass + + def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--readelf', @@ -120,11 +129,11 @@ def main(): # Extract flags passed through ldflags but meant for this script. # https://crbug.com/954311 tracks finding a better way to plumb these. - link_only = InterceptFlag('--link-only', args.command) + partitioned_library = InterceptFlag('--partitioned-library', args.command) collect_inputs_only = InterceptFlag('--collect-inputs-only', args.command) - # If only linking, we are likely generating a partitioned .so that will be - # split apart later. 
In that case: + # Partitioned .so libraries are used only for splitting apart in a subsequent + # step. # # - The TOC file optimization isn't useful, because the partition libraries # must always be re-extracted if the combined library changes (and nothing @@ -136,20 +145,21 @@ def main(): # tools would need to be updated to handle and/or not complain about # partitioned libraries. Instead, to keep Ninja happy, simply create dummy # files for the TOC and stripped lib. - if link_only or collect_inputs_only: + if collect_inputs_only or partitioned_library: open(args.output, 'w').close() open(args.tocfile, 'w').close() - if args.dwp: - open(args.sofile + '.dwp', 'w').close() # Instead of linking, records all inputs to a file. This is used by # enable_resource_allowlist_generation in order to avoid needing to # link (which is slow) to build the resources allowlist. if collect_inputs_only: - with open(args.sofile, 'w') as f: - CollectInputs(f, args.command) if args.map_file: open(args.map_file, 'w').close() + if args.dwp: + open(args.sofile + '.dwp', 'w').close() + + with open(args.sofile, 'w') as f: + CollectInputs(f, args.command) return 0 # First, run the actual link. @@ -158,37 +168,40 @@ def main(): env=fast_env, map_file=args.map_file) - if result != 0 or link_only: + if result != 0: return result # If dwp is set, then package debug info for this SO. dwp_proc = None if args.dwp: - # Suppress output here because it doesn't seem to be useful. The most - # common error is a segfault, which will happen if files are missing. - with open(os.devnull, "w") as devnull: - dwp_proc = subprocess.Popen(wrapper_utils.CommandToRun( - [args.dwp, '-e', args.sofile, '-o', args.sofile + '.dwp']), - stdout=devnull, - stderr=subprocess.STDOUT) - - # Next, generate the contents of the TOC file. - result, toc = CollectTOC(args) - if result != 0: - return result - - # If there is an existing TOC file with identical contents, leave it alone. - # Otherwise, write out the TOC file. - UpdateTOC(args.tocfile, toc) - - # Finally, strip the linked shared object file (if desired). - if args.strip: - result = subprocess.call(wrapper_utils.CommandToRun( - [args.strip, '-o', args.output, args.sofile])) + # Explicit delete to account for symlinks (when toggling between + # debug/release). + SafeDelete(args.sofile + '.dwp') + # Suppress warnings about duplicate CU entries (https://crbug.com/1264130) + dwp_proc = subprocess.Popen(wrapper_utils.CommandToRun( + [args.dwp, '-e', args.sofile, '-o', args.sofile + '.dwp']), + stderr=subprocess.DEVNULL) + + if not partitioned_library: + # Next, generate the contents of the TOC file. + result, toc = CollectTOC(args) + if result != 0: + return result + + # If there is an existing TOC file with identical contents, leave it alone. + # Otherwise, write out the TOC file. + UpdateTOC(args.tocfile, toc) + + # Finally, strip the linked shared object file (if desired). + if args.strip: + result = subprocess.call( + wrapper_utils.CommandToRun( + [args.strip, '-o', args.output, args.sofile])) if dwp_proc: dwp_result = dwp_proc.wait() if dwp_result != 0: + sys.stderr.write('dwp failed with error code {}\n'.format(dwp_result)) return dwp_result return result diff --git a/build/toolchain/gcc_toolchain.gni b/build/toolchain/gcc_toolchain.gni index 72153f943245..70a79d32c623 100644 --- a/build/toolchain/gcc_toolchain.gni +++ b/build/toolchain/gcc_toolchain.gni @@ -1,10 +1,11 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. 
+# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//build/config/clang/clang.gni") import("//build/config/compiler/compiler.gni") import("//build/config/coverage/coverage.gni") +import("//build/config/rust.gni") import("//build/config/sanitizers/sanitizers.gni") import("//build/config/v8_target_cpu.gni") import("//build/toolchain/cc_wrapper.gni") @@ -29,9 +30,12 @@ declare_args() { # builds. Requires debug info. enable_resource_allowlist_generation = is_official_build && - # Don't enable for Android-on-Chrome OS so that they can build with - # symbol_level=0 without this failing (crbug.com/891164). + # Don't enable for Android-on-Chrome OS. (target_os == "android" || target_os == "win") + + # Use -MD instead of -MMD for compiler commands. This is useful for tracking + # the comprehensive set of dependencies. + system_headers_in_deps = false } # When the arg is set via args.gn, it applies to all toolchains. In order to not @@ -42,8 +46,6 @@ if ((is_linux || is_chromeos) && target_os == "android") { # Ensure enable_resource_allowlist_generation is enabled only when it will work. if (enable_resource_allowlist_generation) { - assert(symbol_level >= 1, - "enable_resource_allowlist_generation=true requires symbol_level >= 1") assert( !strip_debug_info, "enable_resource_allowlist_generation=true requires strip_debug_info=false") @@ -110,15 +112,10 @@ if (enable_resource_allowlist_generation) { # all shared libraries and executables as they are built. The pre-stripped # artifacts will be put in lib.unstripped/ and exe.unstripped/. # -# Optional parameters added with the Starboard platform: -# -# - tail_lib_dependencies -# If defined, this string will be added to the compilation line after all -# other libs are specified. -# - using_snarl_linker -# Defining this string will ensure static linker flags are passed in a way -# that the snarl tool will accept. -template("gcc_toolchain") { +# Callers will normally want to invoke "gcc_toolchain" instead, which makes +# a toolchain just like this but may additionally create an extra toolchain +# without sanitizers for host-side tools. +template("single_gcc_toolchain") { toolchain(target_name) { is_starboard_toolchain = target_name == "starboard" if (!sb_is_modular || sb_is_evergreen) { @@ -143,9 +140,15 @@ template("gcc_toolchain") { assert(defined(invoker.toolchain_args), "Toolchains must specify toolchain_args") invoker_toolchain_args = invoker.toolchain_args - assert(is_starboard || defined(invoker_toolchain_args.current_cpu), + if (is_starboard && !defined(invoker_toolchain_args.current_cpu)) { + invoker_toolchain_args.current_cpu = target_cpu + } + if (is_starboard && !defined(invoker_toolchain_args.current_os)) { + invoker_toolchain_args.current_os = target_os + } + assert(defined(invoker_toolchain_args.current_cpu), "toolchain_args must specify a current_cpu") - assert(is_starboard || defined(invoker_toolchain_args.current_os), + assert(defined(invoker_toolchain_args.current_os), "toolchain_args must specify a current_os") # When invoking this toolchain not as the default one, these args will be @@ -162,20 +165,29 @@ template("gcc_toolchain") { if (!use_cobalt_customizations) { host_toolchain = host_toolchain + # The same applies to the toolchain we use to build Rust procedural + # macros, which is probably the same but might have sanitizers disabled. 
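+      # (Procedural-macro dylibs are loaded into the host rustc process while
+      # it compiles, so presumably they must not pull in sanitizer runtimes;
+      # hence the dedicated no-sanitizers host toolchain.)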
+ host_toolchain_no_sanitizers = host_toolchain_no_sanitizers + if (!defined(invoker_toolchain_args.v8_current_cpu)) { v8_current_cpu = invoker_toolchain_args.current_cpu } } } - # When the invoker has explicitly overridden user_rbe, use_goma or + # When the invoker has explicitly overridden use_remoteexec, use_goma or # cc_wrapper in the toolchain args, use those values, otherwise default # to the global one. This works because the only reasonable override # that toolchains might supply for these values are to force-disable them. - if (defined(toolchain_args.use_rbe)) { - toolchain_uses_rbe = toolchain_args.use_rbe + if (defined(toolchain_args.use_remoteexec)) { + toolchain_uses_remoteexec = toolchain_args.use_remoteexec + } else { + toolchain_uses_remoteexec = use_remoteexec + } + if (defined(toolchain_args.use_remoteexec_links)) { + toolchain_uses_remoteexec_links = toolchain_args.use_remoteexec_links } else { - toolchain_uses_rbe = use_rbe + toolchain_uses_remoteexec_links = use_remoteexec_links } if (defined(toolchain_args.use_goma)) { toolchain_uses_goma = toolchain_args.use_goma @@ -195,10 +207,10 @@ template("gcc_toolchain") { } else { toolchain_cc_wrapper = cc_wrapper } - assert(!(toolchain_uses_rbe && toolchain_uses_goma), - "Goma and RBE can't be used together.") - assert(!(toolchain_cc_wrapper != "" && toolchain_uses_rbe), - "RBE and cc_wrapper can't be used together.") + assert(!(toolchain_uses_remoteexec && toolchain_uses_goma), + "Goma and re-client can't be used together.") + assert(!(toolchain_cc_wrapper != "" && toolchain_uses_remoteexec), + "re-client and cc_wrapper can't be used together.") assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma), "Goma and cc_wrapper can't be used together.") @@ -210,32 +222,46 @@ template("gcc_toolchain") { # wrapper will have picked up gomacc via cmd-line arg. So need to prepend # gomacc in that case. goma_path = "$goma_dir/gomacc" - if (toolchain_uses_rbe) { + if (toolchain_uses_remoteexec && + (!defined(invoker_toolchain_args.needs_gomacc_path_arg) || + !invoker_toolchain_args.needs_gomacc_path_arg)) { if (defined(toolchain_args.rbe_cc_cfg_file)) { toolchain_rbe_cc_cfg_file = toolchain_args.rbe_cc_cfg_file } else { toolchain_rbe_cc_cfg_file = rbe_cc_cfg_file } - # C/C++ (clang) rewrapper prefix to use when use_rbe is true. + # C/C++ (clang) rewrapper prefix to use when use_remoteexec is true. compiler_prefix = "${rbe_bin_dir}/rewrapper -cfg=${toolchain_rbe_cc_cfg_file} -exec_root=${rbe_exec_root} " } else if (toolchain_uses_goma && (!defined(invoker_toolchain_args.needs_gomacc_path_arg) || !invoker_toolchain_args.needs_gomacc_path_arg)) { compiler_prefix = "${goma_path} " + if (use_goma_rust) { + rust_compiler_prefix = compiler_prefix + } } else { compiler_prefix = "${toolchain_cc_wrapper} " } - if (use_goma_thin_lto && toolchain_uses_goma && use_thin_lto) { - # goma_ld.py uses autoninja in an attempt to set a reasonable + + if (toolchain_uses_remoteexec_links) { + if (defined(toolchain_args.rbe_link_cfg_file)) { + toolchain_rbe_link_cfg_file = toolchain_args.rbe_link_cfg_file + } else { + toolchain_rbe_link_cfg_file = rbe_link_cfg_file + } + link_prefix = "${rbe_bin_dir}/rewrapper -cfg=${toolchain_rbe_link_cfg_file} -exec_root=${rbe_exec_root} " + not_needed([ "goma_path" ]) + } else if (use_goma_thin_lto && toolchain_uses_goma && use_thin_lto) { + # remote_ld.py uses autoninja in an attempt to set a reasonable # number of jobs, but this results in too low a value on # Chrome OS builders. So we pass in an explicit value. 
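+      # Either way, `ld` below is built by plain string concatenation, e.g.
+      # (illustrative): "rewrapper -cfg=... -exec_root=... clang++" or
+      # "python3 .../remote_ld.py --wrapper gomacc --jobs 200 -- clang++".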
- goma_ld = - "$python_path " + - rebase_path("//tools/clang/scripts/goma_ld.py", root_build_dir) + - " --gomacc ${goma_path} --jobs 200 -- " + link_prefix = + "\"$python_path\" " + + rebase_path("//tools/clang/scripts/remote_ld.py", root_build_dir) + + " --wrapper ${goma_path} --jobs 200 -- " } else { - goma_ld = "" + link_prefix = "" not_needed([ "goma_path" ]) } @@ -270,7 +296,8 @@ template("gcc_toolchain") { # The wrapper needs to know what OS we target because it uses that to # select a list of files that should not be instrumented. - _coverage_wrapper = _coverage_wrapper + " --target-os=" + target_os + _coverage_wrapper = _coverage_wrapper + " --target-os=" + + invoker_toolchain_args.current_os # We want to instrument everything if there is no input file set. # If there is a file we need to give it to the wrapper script so it can @@ -281,14 +308,15 @@ template("gcc_toolchain") { rebase_path(toolchain_coverage_instrumentation_input_file, root_build_dir) } - compiler_prefix = "$python_path ${_coverage_wrapper} " + compiler_prefix + compiler_prefix = + "\"$python_path\" ${_coverage_wrapper} " + compiler_prefix } - cc = "$compiler_prefix\"${invoker.cc}\"" - cxx = "$compiler_prefix\"${invoker.cxx}\"" - asm = "$asm_prefix\"${invoker.cc}\"" + cc = compiler_prefix + invoker.cc + cxx = compiler_prefix + invoker.cxx + asm = asm_prefix + invoker.cc ar = invoker.ar - ld = "$goma_ld${invoker.ld}" + ld = link_prefix + invoker.ld if (defined(invoker.readelf)) { readelf = invoker.readelf } else { @@ -354,6 +382,12 @@ template("gcc_toolchain") { extra_ldflags = "" } + if (system_headers_in_deps) { + md = "-MD" + } else { + md = "-MMD" + } + enable_linker_map = defined(invoker.enable_linker_map) && invoker.enable_linker_map && generate_linker_map @@ -373,7 +407,7 @@ template("gcc_toolchain") { tool("cc") { depfile = "{{output}}.d" precompiled_header_type = "gcc" - command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cppflags}${extra_cflags} -c {{source}} -o {{output}}" + command = "$cc $md -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cppflags}${extra_cflags} -c {{source}} -o {{output}}" depsformat = "gcc" description = "CC {{output}}" outputs = [ "$object_subdir/{{source_name_part}}.o" ] @@ -382,7 +416,7 @@ template("gcc_toolchain") { tool("cxx") { depfile = "{{output}}.d" precompiled_header_type = "gcc" - command = "$cxx -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}" + command = "$cxx $md -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}" depsformat = "gcc" description = "CXX {{output}}" outputs = [ "$object_subdir/{{source_name_part}}.o" ] @@ -396,9 +430,9 @@ template("gcc_toolchain") { # now to add cflags for evergreen platforms but we haven't yet decided # whether cflags should be added here for all platforms. 
if (!is_starboard_toolchain && is_starboard && sb_is_modular) { - command = "$asm -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}" + command = "$asm $md -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}" } else { - command = "$asm -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}" + command = "$asm $md -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}" } depsformat = "gcc" @@ -407,11 +441,6 @@ template("gcc_toolchain") { } tool("alink") { - if (defined(invoker.using_snarl_linker) && invoker.using_snarl_linker) { - rspfile = "{{output}}.rsp" - rspfile_content = "{{inputs_newline}}" - command = "\"$ar\" {{arflags}} rcsD {{output}} @\"$rspfile\"" - } else if (current_os == "aix") { # AIX does not support either -D (deterministic output) or response # files. @@ -427,7 +456,7 @@ template("gcc_toolchain") { if (host_os == "win") { tool_wrapper_path = rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir) - command = "cmd /c $python_path $tool_wrapper_path delete-file {{output}} && $command" + command = "cmd /s /c \"\"$python_path\" $tool_wrapper_path delete-file {{output}} && $command\"" } else { command = "rm -f {{output}} && $command" } @@ -448,6 +477,7 @@ template("gcc_toolchain") { soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so". sofile = "{{output_dir}}/$soname" # Possibly including toolchain dir. rspfile = sofile + ".rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" if (defined(invoker.strip)) { @@ -462,7 +492,12 @@ template("gcc_toolchain") { # .TOC file, overwrite it, otherwise, don't change it. tocfile = sofile + ".TOC" - link_command = "$ld -shared -Wl,-soname=\"$soname\" {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" @\"$rspfile\"" + soname_flag = "" + if (current_os != "aix") { + # -soname flag is not available on aix ld + soname_flag = "-Wl,-soname=\"$soname\"" + } + link_command = "$ld -shared $soname_flag {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" @\"$rspfile\" {{rlibs}}" # Generate a map file to be used for binary size analysis. # Map file adds ~10% to the link time on a z620. @@ -485,12 +520,23 @@ template("gcc_toolchain") { # The host might not have a POSIX shell and utilities (e.g. Windows). 
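The `solink` rule below delegates to gcc_solink_wrapper.py (patched earlier in this change), which owns the TOC optimization: the .TOC file records the shared object's exported-symbol table and is rewritten only when its contents change, so dependents re-link only when the interface changed. A minimal sketch of that compare-before-write step (the helper name is assumed, not the script's actual API):

    def update_toc(tocfile, new_contents):
        # Leave an identical TOC untouched so its mtime does not advance and
        # dependent links can be skipped; rewrite it only on change.
        try:
            with open(tocfile) as f:
                if f.read() == new_contents:
                    return
        except OSError:
            pass
        with open(tocfile, 'w') as f:
            f.write(new_contents)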
solink_wrapper = rebase_path("//build/toolchain/gcc_solink_wrapper.py", root_build_dir) - command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch$dwp_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\" -- $link_command" + solink_extra_flags = "" + if (current_os == "aix") { + # to be intercepted by solink_wrapper, so that we exit immediately + # after linking the shared object, without generating the TOC file + # (skipped on Aix) + solink_extra_flags = "--partitioned-library" + } + command = "\"$python_path\" \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch$dwp_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\" -- $link_command $solink_extra_flags" if (target_cpu == "mipsel" && is_component_build && is_android) { - rspfile_content = "-Wl,--start-group -Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}} -Wl,--end-group$tail_lib_dependencies" + rspfile_content = "-Wl,--start-group -Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}} -Wl,--end-group" + } else if (current_os == "aix") { + # --whole-archive, --no-whole-archive flags are not available on the aix + # ld. + rspfile_content = "{{inputs}} {{solibs}} {{libs}}" } else { - rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}}$tail_lib_dependencies" + rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}}" } description = "SOLINK $sofile" @@ -544,6 +590,7 @@ template("gcc_toolchain") { soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so". sofile = "{{output_dir}}/$soname" rspfile = sofile + ".rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" if (defined(invoker.strip)) { @@ -552,13 +599,23 @@ template("gcc_toolchain") { unstripped_sofile = sofile } - command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\"" + soname_flag = "" + whole_archive_flag = "" + no_whole_archive_flag = "" + if (current_os != "aix") { + # -soname, --whole-archive, --no-whole-archive flags are not available + # on aix ld + soname_flag = "-Wl,-soname=\"$soname\"" + whole_archive_flag = "-Wl,--whole-archive" + no_whole_archive_flag = "-Wl,--no-whole-archive" + } + command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" $soname_flag @\"$rspfile\"" if (defined(invoker.strip)) { strip_command = "${invoker.strip} -o \"$sofile\" \"$unstripped_sofile\"" command += " && " + strip_command } - rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}}$tail_lib_dependencies" + rspfile_content = "$whole_archive_flag {{inputs}} {{solibs}} $no_whole_archive_flag {{libs}} {{rlibs}}" description = "SOLINK_MODULE $sofile" @@ -590,6 +647,7 @@ template("gcc_toolchain") { outfile = "{{output_dir}}/$exename" rspfile = "$outfile.rsp" unstripped_outfile = outfile + pool = "//build/toolchain:link_pool($default_toolchain)" # Use this for {{output_extension}} expansions unless a target manually @@ -606,7 +664,8 @@ template("gcc_toolchain") { start_group_flag = "" end_group_flag = "" if (current_os != "aix") { - # the "--start-group .. --end-group" feature isn't available on the aix ld. + # the "--start-group .. --end-group" feature isn't available on the aix + # ld. 
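+        # (`--start-group`/`--end-group` make the linker rescan the archives
+        # in the group until no new symbols are resolved, which tolerates
+        # circular dependencies between static libraries.)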
start_group_flag = "-Wl,--start-group" end_group_flag = "-Wl,--end-group " } @@ -638,9 +697,10 @@ template("gcc_toolchain") { link_wrapper = rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir) - command = "$python_path \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch$dwp_switch -- $link_command" + command = "\"$python_path\" \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch$dwp_switch -- $link_command" description = "LINK $outfile" + rspfile_content = "{{inputs}}" outputs = [ outfile ] if (outfile != unstripped_outfile) { @@ -685,7 +745,150 @@ template("gcc_toolchain") { pool = "//build/toolchain:action_pool($default_toolchain)" } - forward_variables_from(invoker, [ "deps" ]) + if (toolchain_has_rust) { + if (!defined(rust_compiler_prefix)) { + rust_compiler_prefix = "" + } + rustc_bin = rebase_path("${rust_sysroot}/bin/rustc", root_build_dir) + rustc = "$rust_compiler_prefix${rustc_bin}" + rust_sysroot_relative_to_out = rebase_path(rust_sysroot, root_out_dir) + rustc_wrapper = rebase_path("//build/rust/rustc_wrapper.py") + + # RSP manipulation due to https://bugs.chromium.org/p/gn/issues/detail?id=249 + tool("rust_staticlib") { + libname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$libname.rsp" + depfile = "$libname.d" + + default_output_extension = ".a" + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST(STATICLIB) {{output}}" + outputs = [ libname ] + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $libname LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_rlib") { + # We must always prefix with `lib` even if the library already starts + # with that prefix or else our stdlib is unable to find libc.rlib (or + # actually liblibc.rlib). + rlibname = + "{{output_dir}}/lib{{target_output_name}}{{output_extension}}" + depfile = "$rlibname.d" + + # Do not use rsp files in this (common) case because they occupy the + # ninja main thread, and {{rlibs}} have shorter command lines than + # fully linked targets. + + default_output_extension = ".rlib" + + # This is prefixed unconditionally in `rlibname`. 
+ # output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST {{output}}" + outputs = [ rlibname ] + + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args {{rustdeps}} {{externs}} --emit=dep-info=$depfile,link -o $rlibname LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_bin") { + exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + depfile = "$exename.d" + rspfile = "$exename.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + default_output_extension = default_executable_extension + default_output_dir = "{{root_out_dir}}" + description = "RUST(BIN) {{output}}" + outputs = [ exename ] + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $exename LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_cdylib") { + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + depfile = "$dllname.d" + rspfile = "$dllname.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + default_output_extension = default_shlib_extension + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}${default_shlib_subdir}" + description = "RUST(CDYLIB) {{output}}" + outputs = [ dllname ] + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_macro") { + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + depfile = "$dllname.d" + rspfile = "$dllname.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + default_output_extension = default_shlib_extension + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}${default_shlib_subdir}" + description = "RUST(MACRO) {{output}}" + outputs = [ dllname ] + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + } + + forward_variables_from(invoker, + [ + "deps", + "propagates_configs", + ]) + } +} + +# Makes a single GCC toolchain, or possibly two if we need +# an equivalent toolchain without sanitizers. +template("gcc_toolchain") { + single_gcc_toolchain(target_name) { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + forward_variables_from(invoker, + "*", + [ + "visibility", + "test_only", + ]) + + # No need to forward visibility and test_only as they apply to targets not + # toolchains, but presubmit checks require that we explicitly exclude them + } + + if (using_sanitizer) { + # Make an additional toolchain with no sanitizers. 
+    single_gcc_toolchain("${target_name}_no_sanitizers") {
+      assert(defined(invoker.toolchain_args),
+             "Toolchains must declare toolchain_args")
+      forward_variables_from(invoker,
+                             "*",
+                             [
+                               "toolchain_args",
+                               "visibility",
+                               "test_only",
+                             ])
+      toolchain_args = {
+        # Populate toolchain args from the invoker.
+        forward_variables_from(invoker.toolchain_args, "*")
+        toolchain_disables_sanitizers = true
+      }
+    }
   }
 }
 
@@ -697,29 +900,35 @@ template("gcc_toolchain") {
 # actually just be doing a native compile. The invoker can optionally override
 # use_gold too.
 template("clang_toolchain") {
-  if (defined(invoker.toolprefix)) {
-    toolprefix = invoker.toolprefix
-  } else {
-    toolprefix = ""
-  }
   if (is_starboard) {
     clang_base_path = invoker.clang_base_path
   }
 
-  gcc_toolchain(target_name) {
-    prefix = rebase_path("$clang_base_path/bin", root_build_dir)
-    cc = "$prefix/clang"
-    cxx = "$prefix/clang++"
+    _path = "$clang_base_path/bin"
+    _is_path_absolute = get_path_info(_path, "abspath") == _path
+
+    # Preserve absolute paths for tools like distcc.
+    if (_is_path_absolute && filter_include([ _path ], [ "//*" ]) == []) {
+      prefix = _path
+    } else {
+      prefix = rebase_path(_path, root_build_dir)
+    }
+
+    cc = "${prefix}/clang"
+    cxx = "${prefix}/clang++"
     ld = cxx
-    readelf = "${toolprefix}readelf"
+    readelf = "${prefix}/llvm-readelf"
     ar = "${prefix}/llvm-ar"
-    nm = "nm"
+    nm = "${prefix}/llvm-nm"
 
     forward_variables_from(invoker,
                            [
                              "strip",
                              "default_shlib_subdir",
+                             "dwp",
                              "enable_linker_map",
+                             "loadable_module_extension",
+                             "propagates_configs",
                              "use_unstripped_as_runtime_outputs",
                            ])
diff --git a/build/toolchain/get_concurrent_links.py b/build/toolchain/get_concurrent_links.py
index d77bb4375ea5..47f009362d7c 100755
--- a/build/toolchain/get_concurrent_links.py
+++ b/build/toolchain/get_concurrent_links.py
@@ -1,13 +1,11 @@
-#!/usr/bin/env python
-# Copyright 2014 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2014 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 # This script computes the number of concurrent links we want to run in the build
 # as a function of machine spec. It's based on GetDefaultConcurrentLinks in GYP.
 
-from __future__ import print_function
-
 import argparse
 import multiprocessing
 import os
@@ -58,14 +56,21 @@ class MEMORYSTATUSEX(ctypes.Structure):
 
 def _GetDefaultConcurrentLinks(per_link_gb, reserve_gb, thin_lto_type,
-                               secondary_per_link_gb):
+                               secondary_per_link_gb, override_ram_in_gb):
   explanation = []
   explanation.append(
       'per_link_gb={} reserve_gb={} secondary_per_link_gb={}'.format(
           per_link_gb, reserve_gb, secondary_per_link_gb))
-  mem_total_gb = float(_GetTotalMemoryInBytes()) / 2**30
-  mem_total_gb = max(0, mem_total_gb - reserve_gb)
-  mem_cap = int(max(1, mem_total_gb / per_link_gb))
+  if override_ram_in_gb:
+    mem_total_gb = override_ram_in_gb
+  else:
+    mem_total_gb = float(_GetTotalMemoryInBytes()) / 2**30
+  adjusted_mem_total_gb = max(0, mem_total_gb - reserve_gb)
+
+  # Ensure that at least as many links are allocated for the secondary as
+  # for the primary. The secondary link usually uses fewer GBs.
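+  # For example (illustrative numbers, not the defaults): with
+  # adjusted_mem_total_gb=62, per_link_gb=8 and secondary_per_link_gb=2,
+  # the cap below is int(max(1, 62 / (8 + 2))) = 6, reserving roughly one
+  # secondary link's worth of memory per primary link.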
+  mem_cap = int(
+      max(1, adjusted_mem_total_gb / (per_link_gb + secondary_per_link_gb)))
 
   try:
     cpu_count = multiprocessing.cpu_count()
@@ -82,8 +87,9 @@ def _GetDefaultConcurrentLinks(per_link_gb, reserve_gb, thin_lto_type,
     assert thin_lto_type == 'local'
     cpu_cap = min(cpu_count, 6)
 
-  explanation.append('cpu_count={} cpu_cap={} mem_total_gb={:.1f}GiB'.format(
-      cpu_count, cpu_cap, mem_total_gb))
+  explanation.append(
+      'cpu_count={} cpu_cap={} mem_total_gb={:.1f}GiB adjusted_mem_total_gb={:.1f}GiB'
+      .format(cpu_count, cpu_cap, mem_total_gb, adjusted_mem_total_gb))
 
   num_links = min(mem_cap, cpu_cap)
   if num_links == cpu_cap:
@@ -94,15 +100,26 @@ def _GetDefaultConcurrentLinks(per_link_gb, reserve_gb, thin_lto_type,
   else:
     reason = 'RAM'
 
+  # Static links see too many open files if we run too many concurrent links.
+  # ref: http://b/233068481
+  if num_links > 30:
+    num_links = 30
+    reason = 'nofile'
+
   explanation.append('concurrent_links={} (reason: {})'.format(
       num_links, reason))
 
-  # See if there is RAM leftover for a secondary pool.
-  if secondary_per_link_gb and num_links == mem_cap:
-    mem_remaining = mem_total_gb - mem_cap * per_link_gb
+  # Use remaining RAM for a secondary pool if needed.
+  if secondary_per_link_gb:
+    mem_remaining = adjusted_mem_total_gb - num_links * per_link_gb
     secondary_size = int(max(0, mem_remaining / secondary_per_link_gb))
-    explanation.append('secondary_size={} (mem_remaining={:.1f}GiB)'.format(
-        secondary_size, mem_remaining))
+    if secondary_size > cpu_count:
+      secondary_size = cpu_count
+      reason = 'cpu_count'
+    else:
+      reason = 'mem_remaining={:.1f}GiB'.format(mem_remaining)
+    explanation.append('secondary_size={} (reason: {})'.format(
+        secondary_size, reason))
   else:
     secondary_size = 0
 
@@ -114,19 +131,25 @@ def main():
  parser.add_argument('--mem_per_link_gb', type=int, default=8)
  parser.add_argument('--reserve_mem_gb', type=int, default=0)
  parser.add_argument('--secondary_mem_per_link', type=int, default=0)
+  parser.add_argument('--override-ram-in-gb-for-testing', type=float, default=0)
  parser.add_argument('--thin-lto')
  options = parser.parse_args()
 
  primary_pool_size, secondary_pool_size, explanation = (
      _GetDefaultConcurrentLinks(options.mem_per_link_gb,
                                 options.reserve_mem_gb, options.thin_lto,
-                                 options.secondary_mem_per_link))
-  sys.stdout.write(
-      gn_helpers.ToGNString({
-          'primary_pool_size': primary_pool_size,
-          'secondary_pool_size': secondary_pool_size,
-          'explanation': explanation,
-      }))
+                                 options.secondary_mem_per_link,
+                                 options.override_ram_in_gb_for_testing))
+  if options.override_ram_in_gb_for_testing:
+    print('primary={} secondary={} explanation={}'.format(
+        primary_pool_size, secondary_pool_size, explanation))
+  else:
+    sys.stdout.write(
+        gn_helpers.ToGNString({
+            'primary_pool_size': primary_pool_size,
+            'secondary_pool_size': secondary_pool_size,
+            'explanation': explanation,
+        }))
  return 0
diff --git a/build/toolchain/get_cpu_count.py b/build/toolchain/get_cpu_count.py
index 765c7c78f6bc..f7cf9511d707 100644
--- a/build/toolchain/get_cpu_count.py
+++ b/build/toolchain/get_cpu_count.py
@@ -1,10 +1,9 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 # This script shows the cpu count to specify the capacity of the action pool.
-from __future__ import print_function import multiprocessing import sys diff --git a/build/toolchain/get_goma_dir.py b/build/toolchain/get_goma_dir.py index 114da6c0ea29..14c9d5b6c1a9 100644 --- a/build/toolchain/get_goma_dir.py +++ b/build/toolchain/get_goma_dir.py @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/toolchain/goma.gni b/build/toolchain/goma.gni index 56787f1b9bcf..9e0e5476ee85 100644 --- a/build/toolchain/goma.gni +++ b/build/toolchain/goma.gni @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -24,11 +24,6 @@ if (use_goma && goma_dir == "") { declare_args() { # TODO(crbug.com/726475): true if use_goma = true in the future. use_java_goma = false - - # Deprecated and ignored as Goma RBE is now the default. Still exists - # to avoid breaking the build on the bots. Will be removed when all - # bots have been configured to not set this variable. - ios_use_goma_rbe = -1 } assert(!is_win || !use_goma || is_clang, diff --git a/build/toolchain/ios/BUILD.gn b/build/toolchain/ios/BUILD.gn index bc7352911914..5623a84f5001 100644 --- a/build/toolchain/ios/BUILD.gn +++ b/build/toolchain/ios/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2021 The Chromium Authors. All rights reserved. +# Copyright 2021 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -16,12 +16,12 @@ template("ios_toolchain") { sdk_developer_dir = ios_sdk_developer_dir deployment_target = ios_deployment_target - sdk_name = ios_sdk_name + target_environment = target_environment bin_path = ios_bin_path - strip_with_lipo = true toolchain_args = { forward_variables_from(invoker.toolchain_args, "*") + xcode_build = xcode_build current_os = "ios" } } @@ -33,10 +33,10 @@ ios_toolchain("ios_clang_arm64") { } } -ios_toolchain("ios_clang_arm64_14_0") { +ios_toolchain("ios_clang_arm64_13_4") { toolchain_args = { current_cpu = "arm64" - ios_deployment_target = "14.0" + ios_deployment_target = "13.4" } } @@ -46,24 +46,16 @@ ios_toolchain("ios_clang_arm") { } } -ios_toolchain("ios_clang_arm_fat") { - toolchain_args = { - current_cpu = "arm" - is_fat_secondary_toolchain = true - primary_fat_toolchain_name = "//build/toolchain/ios:ios_clang_arm64" - } -} - ios_toolchain("ios_clang_x64") { toolchain_args = { current_cpu = "x64" } } -ios_toolchain("ios_clang_x64_14_0") { +ios_toolchain("ios_clang_x64_13_4") { toolchain_args = { current_cpu = "x64" - ios_deployment_target = "14.0" + ios_deployment_target = "13.4" } } @@ -72,11 +64,3 @@ ios_toolchain("ios_clang_x86") { current_cpu = "x86" } } - -ios_toolchain("ios_clang_x86_fat") { - toolchain_args = { - current_cpu = "x86" - is_fat_secondary_toolchain = true - primary_fat_toolchain_name = "//build/toolchain/ios:ios_clang_x64" - } -} diff --git a/build/toolchain/ios/compile_xcassets.py b/build/toolchain/ios/compile_xcassets.py index 43201bc66c8c..418bde507b6c 100644 --- a/build/toolchain/ios/compile_xcassets.py +++ b/build/toolchain/ios/compile_xcassets.py @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. 
+# Copyright 2016 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -30,23 +30,6 @@
 # Name of the section containing informational messages that can be ignored.
 NOTICE_SECTION = 'com.apple.actool.compilation-results'
 
-# Regular expressions matching spurious messages from actool that should be
-# ignored (as they are bogus). Generally a bug should be filed with Apple
-# when adding a pattern here.
-SPURIOUS_PATTERNS = [
-    re.compile(v) for v in [
-        # crbug.com/770634, likely a bug in Xcode 9.1 beta, remove once build
-        # requires a version of Xcode with a fix.
-        r'\[\]\[ipad\]\[76x76\]\[\]\[\]\[1x\]\[\]\[\]: notice: \(null\)',
-
-        # crbug.com/770634, likely a bug in Xcode 9.2 beta, remove once build
-        # requires a version of Xcode with a fix.
-        r'\[\]\[ipad\]\[76x76\]\[\]\[\]\[1x\]\[\]\[\]: notice: 76x76@1x app'
-        ' icons only apply to iPad apps targeting releases of iOS prior to'
-        ' 10.0.',
-    ]
-]
-
 # Map special types of asset catalog to the corresponding command-line
 # parameters that need to be passed to actool.
 ACTOOL_FLAG_FOR_ASSET_TYPE = {
@@ -54,14 +37,13 @@
     '.launchimage': '--launch-image',
 }
 
-
-def IsSpuriousMessage(line):
-  """Returns whether line contains a spurious message that should be ignored."""
-  for pattern in SPURIOUS_PATTERNS:
-    match = pattern.search(line)
-    if match is not None:
-      return True
-  return False
+def FixAbsolutePathInLine(line, relative_paths):
+  """Fix absolute paths present in |line| to relative paths."""
+  absolute_path = line.split(':')[0]
+  relative_path = relative_paths.get(absolute_path, absolute_path)
+  if absolute_path == relative_path:
+    return line
+  return relative_path + line[len(absolute_path):]
 
 
 def FixAbsolutePathInLine(line, relative_paths):
@@ -111,8 +93,6 @@ def FilterCompilerOutput(compiler_output, relative_paths):
       current_section = match.group(1)
       continue
     if current_section and current_section != NOTICE_SECTION:
-      if IsSpuriousMessage(line):
-        continue
       if not data_in_section:
         data_in_section = True
         filtered_output.append('/* %s */\n' % current_section)
@@ -123,8 +103,9 @@ def FilterCompilerOutput(compiler_output, relative_paths):
   return ''.join(filtered_output)
 
 
-def CompileAssetCatalog(output, platform, product_type, min_deployment_target,
-                        inputs, compress_pngs, partial_info_plist):
+def CompileAssetCatalog(output, platform, target_environment, product_type,
+                        min_deployment_target, inputs, compress_pngs,
+                        partial_info_plist):
   """Compile the .xcassets bundles to an asset catalog using actool.
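 
   For example (illustrative flag selection, not the full command line): with
   platform='ios' and target_environment='simulator', the actool invocation
   includes:
 
       --notices --warnings --errors
       --minimum-deployment-target <version>
      --platform iphonesimulator
       --target-device iphone --target-device ipad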
 
   Args:
@@ -143,8 +124,6 @@ def CompileAssetCatalog(output, platform, product_type, min_deployment_target,
       '--notices',
       '--warnings',
       '--errors',
-      '--platform',
-      platform,
       '--minimum-deployment-target',
       min_deployment_target,
   ]
@@ -155,10 +134,41 @@ def CompileAssetCatalog(output, platform, product_type, min_deployment_target,
   if product_type != '':
     command.extend(['--product-type', product_type])
 
-  if platform == 'macosx':
-    command.extend(['--target-device', 'mac'])
-  else:
-    command.extend(['--target-device', 'iphone', '--target-device', 'ipad'])
+  if platform == 'mac':
+    command.extend([
+        '--platform',
+        'macosx',
+        '--target-device',
+        'mac',
+    ])
+  elif platform == 'ios':
+    if target_environment == 'simulator':
+      command.extend([
+          '--platform',
+          'iphonesimulator',
+          '--target-device',
+          'iphone',
+          '--target-device',
+          'ipad',
+      ])
+    elif target_environment == 'device':
+      command.extend([
+          '--platform',
+          'iphoneos',
+          '--target-device',
+          'iphone',
+          '--target-device',
+          'ipad',
+      ])
+    elif target_environment == 'catalyst':
+      command.extend([
+          '--platform',
+          'macosx',
+          '--target-device',
+          'ipad',
+          '--ui-framework-family',
+          'uikit',
+      ])
 
   # Scan the input directories for the presence of asset catalog types that
   # require special treatment, and if so, add them to the actool command-line.
@@ -240,8 +250,13 @@ def Main():
   parser.add_argument('--platform',
                       '-p',
                       required=True,
-                      choices=('macosx', 'iphoneos', 'iphonesimulator'),
+                      choices=('mac', 'ios'),
                       help='target platform for the compiled assets catalog')
+  parser.add_argument('--target-environment',
+                      '-e',
+                      default='',
+                      choices=('simulator', 'device', 'catalyst'),
+                      help='target environment for the compiled assets catalog')
   parser.add_argument(
       '--minimum-deployment-target',
       '-t',
@@ -278,9 +293,9 @@ def Main():
   else:
     shutil.rmtree(args.output)
 
-  CompileAssetCatalog(args.output, args.platform, args.product_type,
-                      args.minimum_deployment_target, args.inputs,
-                      args.compress_pngs, args.partial_info_plist)
+  CompileAssetCatalog(args.output, args.platform, args.target_environment,
+                      args.product_type, args.minimum_deployment_target,
+                      args.inputs, args.compress_pngs, args.partial_info_plist)
 
 
 if __name__ == '__main__':
diff --git a/build/toolchain/ios/swiftc.py b/build/toolchain/ios/swiftc.py
index a08dc8d129ef..e77e0b1ff525 100644
--- a/build/toolchain/ios/swiftc.py
+++ b/build/toolchain/ios/swiftc.py
@@ -1,9 +1,8 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import argparse
-import collections
 import json
 import os
 import subprocess
@@ -11,12 +10,50 @@
 import tempfile
 
 
-class OrderedSet(collections.OrderedDict):
-  def add(self, value):
-    self[value] = True
+def fix_module_imports(header_path, output_path):
+  """Convert module imports to work without -fmodules support.
+
+  The Swift compiler assumes that the generated Objective-C header will be
+  imported from code compiled with module support enabled (-fmodules). The
+  generated code thus uses @import and provides no fallback if modules are
+  not enabled.
+
+  This function converts the generated header to instead use #import. It
+  assumes that `@import Foo;` can be replaced by `#import <Foo/Foo.h>`.
+
+  The header is read at `header_path` and written to `output_path`.
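+
+  For example (illustrative), the line
+
+      @import UIKit;
+
+  is rewritten to
+
+      #import <UIKit/UIKit.h> // @import UIKit;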
+ """ + + header_contents = [] + with open(header_path, 'r') as header_file: + for line in header_file: + if line == '#if __has_feature(modules)\n': + header_contents.append('#if 1 // #if __has_feature(modules)\n') + nesting_level = 1 + for line in header_file: + if line == '#endif\n': + nesting_level -= 1 + elif line.startswith('@import'): + name = line.split()[1].split(';')[0] + if name != 'ObjectiveC': + header_contents.append(f'#import <{name}/{name}.h> ') + header_contents.append('// ') + elif line.startswith('#if'): + nesting_level += 1 + + header_contents.append(line) + if nesting_level == 0: + break + else: + header_contents.append(line) + + with open(output_path, 'w') as header_file: + for line in header_contents: + header_file.write(line) def compile_module(module, sources, settings, extras, tmpdir): + """Compile `module` from `sources` using `settings`.""" output_file_map = {} if settings.whole_module_optimization: output_file_map[''] = { @@ -47,17 +84,29 @@ def compile_module(module, sources, settings, extras, tmpdir): if not os.path.exists(settings.object_dir): os.makedirs(settings.object_dir) + if not os.path.exists(settings.pch_output_dir): + os.makedirs(settings.pch_output_dir) + for key in output_file_map: path = output_file_map[key]['object'] if os.path.exists(path): os.unlink(path) + output_file_map.setdefault('', {})['swift-dependencies'] = \ + os.path.join(tmpdir, module + '.swift.d') + output_file_map_path = os.path.join(tmpdir, module + '.json') with open(output_file_map_path, 'w') as output_file_map_file: output_file_map_file.write(json.dumps(output_file_map)) output_file_map_file.flush() extra_args = [] + if settings.file_compilation_dir: + extra_args.extend([ + '-file-compilation-dir', + settings.file_compilation_dir, + ]) + if settings.bridge_header: extra_args.extend([ '-import-objc-header', @@ -89,11 +138,51 @@ def compile_module(module, sources, settings, extras, tmpdir): for include_dir in settings.include_dirs: extra_args.append('-I' + include_dir) + if settings.system_include_dirs: + for system_include_dir in settings.system_include_dirs: + extra_args.extend(['-Xcc', '-isystem', '-Xcc', system_include_dir]) + + if settings.framework_dirs: + for framework_dir in settings.framework_dirs: + extra_args.extend([ + '-F', + framework_dir, + ]) + + if settings.system_framework_dirs: + for system_framework_dir in settings.system_framework_dirs: + extra_args.extend([ + '-F', + system_framework_dir, + ]) + + if settings.enable_cxx_interop: + extra_args.extend([ + '-Xfrontend', + '-enable-cxx-interop', + ]) + + # The swiftc compiler uses a global module cache that is not robust against + # changes in the sub-modules nor against corruption (see crbug.com/1358073). + # Force the compiler to store the module cache in a sub-directory of `tmpdir` + # to ensure a pristine module cache is used for every compiler invocation. + module_cache_path = os.path.join(tmpdir, settings.swiftc_version, + 'ModuleCache') + + # If the generated header is post-processed, generate it to a temporary + # location (to avoid having the file appear to suddenly change). 
+  if settings.fix_module_imports:
+    header_path = os.path.join(tmpdir, f'{module}.h')
+  else:
+    header_path = settings.header_path
+
   process = subprocess.Popen([
-      'swiftc',
+      settings.swift_toolchain_path + '/usr/bin/swiftc',
       '-parse-as-library',
       '-module-name',
       module,
+      '-module-cache-path',
+      module_cache_path,
       '-emit-object',
       '-emit-dependencies',
       '-emit-module',
@@ -101,23 +190,45 @@ def compile_module(module, sources, settings, extras, tmpdir):
       settings.module_path,
       '-emit-objc-header',
       '-emit-objc-header-path',
-      settings.header_path,
+      header_path,
       '-output-file-map',
       output_file_map_path,
-  ] + extra_args + extras + sources,
-                             stdout=subprocess.PIPE,
-                             stderr=subprocess.PIPE,
-                             universal_newlines=True)
+      '-pch-output-dir',
+      os.path.abspath(settings.pch_output_dir),
+  ] + extra_args + extras + sources)
 
-  stdout, stderr = process.communicate()
+  process.communicate()
   if process.returncode:
-    sys.stdout.write(stdout)
-    sys.stderr.write(stderr)
     sys.exit(process.returncode)
 
-  depfile_content = collections.OrderedDict()
+  if settings.fix_module_imports:
+    fix_module_imports(header_path, settings.header_path)
+
+  # The swiftc compiler generates depfiles that use absolute paths, but
+  # ninja requires paths in depfiles to be identical to paths used in
+  # the build.ninja files.
+  #
+  # Since gn generates paths relative to the build directory for all paths
+  # below the repository checkout, we need to convert those to relative
+  # paths.
+  #
+  # See https://crbug.com/1287114 for build failures that happen when the
+  # paths in the depfile are kept absolute.
+  out_dir = os.getcwd() + os.path.sep
+  src_dir = os.path.abspath(settings.root_dir) + os.path.sep
+
+  depfile_content = dict()
   for key in output_file_map:
-    for line in open(output_file_map[key]['dependencies']):
+
+    # When whole module optimisation is disabled, there will be an entry
+    # with an empty string as the key and only 'swift-dependencies' as a key
+    # in the value dictionary. This is expected, so skip entries that do not
+    # include 'dependencies' in their keys.
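+    # For example (illustrative): the map may look like
+    # {'': {'swift-dependencies': 'Mod.swift.d'},
+    #  'foo.swift': {'object': 'foo.o', 'dependencies': 'foo.d'}}; only the
+    # per-source entries carry a 'dependencies' key.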
+    dependency_file_path = output_file_map[key].get('dependencies')
+    if not dependency_file_path:
+      continue
+
+    for line in open(dependency_file_path):
       output, inputs = line.split(' : ', 2)
       _, ext = os.path.splitext(output)
       if ext == '.o':
@@ -125,15 +236,16 @@
       else:
         key = os.path.splitext(settings.module_path)[0] + ext
       if key not in depfile_content:
-        depfile_content[key] = OrderedSet()
+        depfile_content[key] = set()
       for path in inputs.split():
+        if path.startswith(src_dir) or path.startswith(out_dir):
+          path = os.path.relpath(path, out_dir)
         depfile_content[key].add(path)
 
   with open(settings.depfile, 'w') as depfile:
-    for key in depfile_content:
-      if not settings.depfile_filter or key in settings.depfile_filter:
-        inputs = depfile_content[key]
-        depfile.write('%s : %s\n' % (key, ' '.join(inputs)))
+    keys = sorted(depfile_content.keys())
+    for key in keys:
+      depfile.write('%s : %s\n' % (key, ' '.join(sorted(depfile_content[key]))))
 
 
 def main(args):
@@ -144,12 +256,18 @@ def main(args):
   parser.add_argument('-include',
                       action='append',
                       dest='include_dirs',
                       help='add directory to header search path')
+  parser.add_argument('-isystem',
+                      action='append',
+                      dest='system_include_dirs',
+                      help='add directory to system header search path')
   parser.add_argument('sources', nargs='+', help='Swift source file to compile')
   parser.add_argument('-whole-module-optimization',
                       action='store_true',
                       help='enable whole module optimization')
   parser.add_argument('-object-dir',
                       help='path to the generated object files directory')
+  parser.add_argument('-pch-output-dir',
+                      help='path to directory where .pch files are saved')
   parser.add_argument('-module-path', help='path to the generated module file')
   parser.add_argument('-header-path', help='path to the generated header file')
   parser.add_argument('-bridge-header',
@@ -157,13 +275,48 @@ def main(args):
   parser.add_argument('-depfile', help='path to the generated depfile')
   parser.add_argument('-swift-version',
                       help='version of Swift language to support')
-  parser.add_argument('-depfile-filter',
-                      action='append',
-                      help='limit depfile to those files')
   parser.add_argument('-target',
                       action='store',
                       help='generate code for the given target <triple>')
   parser.add_argument('-sdk', action='store', help='compile against sdk')
+  parser.add_argument('-F',
+                      dest='framework_dirs',
+                      action='append',
+                      help='add dir to framework search path')
+  parser.add_argument('-Fsystem',
+                      '-iframework',
+                      dest='system_framework_dirs',
+                      action='append',
+                      help='add dir to system framework search path')
+  parser.add_argument('-root-dir',
+                      dest='root_dir',
+                      action='store',
+                      required=True,
+                      help='path to the root of the repository')
+  parser.add_argument('-swift-toolchain-path',
+                      default='',
+                      action='store',
+                      dest='swift_toolchain_path',
+                      help='path to the root of the Swift toolchain')
+  parser.add_argument('-file-compilation-dir',
+                      default='',
+                      action='store',
+                      help='compilation directory to embed in the debug info')
+  parser.add_argument('-enable-cxx-interop',
+                      dest='enable_cxx_interop',
+                      action='store_true',
+                      help='allow importing C++ modules into Swift')
+  parser.add_argument('-fix-module-imports',
+                      action='store_true',
+                      help='enable hack to fix module imports')
+  parser.add_argument('-swiftc-version',
+                      default='',
+                      action='store',
+                      help='version of swiftc compiler')
+  parser.add_argument('-xcode-version',
+                      default='',
+                      action='store',
+                      help='version of xcode')
 
   parsed, extras = parser.parse_known_args(args)
   with
tempfile.TemporaryDirectory() as tmpdir: diff --git a/build/toolchain/kythe.gni b/build/toolchain/kythe.gni index 7486a4f15b99..950d94379896 100644 --- a/build/toolchain/kythe.gni +++ b/build/toolchain/kythe.gni @@ -1,4 +1,4 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/toolchain/linux/BUILD.gn b/build/toolchain/linux/BUILD.gn index ee494546ffcb..3d6bb56599e6 100644 --- a/build/toolchain/linux/BUILD.gn +++ b/build/toolchain/linux/BUILD.gn @@ -1,9 +1,11 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +import("//build/config/dcheck_always_on.gni") import("//build/config/ozone.gni") import("//build/config/sysroot.gni") +import("//build/config/ui.gni") import("//build/toolchain/gcc_toolchain.gni") clang_toolchain("clang_ppc64") { @@ -15,7 +17,6 @@ clang_toolchain("clang_ppc64") { } clang_toolchain("clang_arm") { - toolprefix = "arm-linux-gnueabihf-" toolchain_args = { current_cpu = "arm" current_os = "linux" @@ -23,7 +24,6 @@ clang_toolchain("clang_arm") { } clang_toolchain("clang_arm64") { - toolprefix = "aarch64-linux-gnu-" toolchain_args = { current_cpu = "arm64" current_os = "linux" @@ -46,7 +46,7 @@ gcc_toolchain("arm64") { current_os = "linux" # reclient does not support gcc. - use_rbe = false + use_remoteexec = false is_clang = false } } @@ -67,7 +67,7 @@ gcc_toolchain("arm") { current_os = "linux" # reclient does not support gcc. - use_rbe = false + use_remoteexec = false is_clang = false } } @@ -82,30 +82,6 @@ clang_toolchain("clang_x86") { } } -clang_toolchain("clang_x86_v8_arm") { - toolchain_args = { - current_cpu = "x86" - v8_current_cpu = "arm" - current_os = "linux" - } -} - -clang_toolchain("clang_x86_v8_mipsel") { - toolchain_args = { - current_cpu = "x86" - v8_current_cpu = "mipsel" - current_os = "linux" - } -} - -clang_toolchain("clang_x86_v8_mips") { - toolchain_args = { - current_cpu = "x86" - v8_current_cpu = "mips" - current_os = "linux" - } -} - gcc_toolchain("x86") { cc = "gcc" cxx = "g++" @@ -123,7 +99,7 @@ gcc_toolchain("x86") { current_os = "linux" # reclient does not support gcc. 
-    use_rbe = false
+    use_remoteexec = false
     is_clang = false
   }
 }
@@ -138,40 +114,74 @@ clang_toolchain("clang_x64") {
   }
 }
 
-clang_toolchain("clang_x64_v8_arm64") {
+template("clang_v8_toolchain") {
+  clang_toolchain(target_name) {
+    toolchain_args = {
+      current_os = "linux"
+      forward_variables_from(invoker.toolchain_args, "*")
+    }
+  }
+}
+
+clang_v8_toolchain("clang_x86_v8_arm") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "arm"
+  }
+}
+
+clang_v8_toolchain("clang_x86_v8_mipsel") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "mipsel"
+  }
+}
+
+clang_v8_toolchain("clang_x86_v8_mips") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "mips"
+  }
+}
+
+clang_v8_toolchain("clang_x64_v8_arm64") {
   toolchain_args = {
     current_cpu = "x64"
     v8_current_cpu = "arm64"
-    current_os = "linux"
   }
 }
 
-clang_toolchain("clang_x64_v8_mips64el") {
+clang_v8_toolchain("clang_x64_v8_mips64el") {
   toolchain_args = {
     current_cpu = "x64"
     v8_current_cpu = "mips64el"
-    current_os = "linux"
   }
 }
 
-clang_toolchain("clang_x64_v8_mips64") {
+clang_v8_toolchain("clang_x64_v8_mips64") {
   toolchain_args = {
     current_cpu = "x64"
     v8_current_cpu = "mips64"
-    current_os = "linux"
   }
 }
 
-clang_toolchain("clang_x64_v8_riscv64") {
+clang_v8_toolchain("clang_x64_v8_riscv64") {
   toolchain_args = {
     current_cpu = "x64"
     v8_current_cpu = "riscv64"
-    current_os = "linux"
+  }
+}
+
+clang_v8_toolchain("clang_x64_v8_loong64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "loong64"
   }
 }
 
 # In a LaCrOS build, this toolchain is intended to be used as an alternate
 # toolchain to build Ash-Chrome in a subdirectory.
+# This is a test-only toolchain.
 clang_toolchain("ash_clang_x64") {
   toolchain_args = {
     # This turns the toolchain into the "Linux ChromeOS" build
@@ -182,9 +192,30 @@ clang_toolchain("ash_clang_x64") {
     # This turns off all of the LaCrOS-specific flags.
     also_build_ash_chrome = false
     chromeos_is_browser_only = false
-    ozone_platform = "x11"
-    ozone_platform_wayland = false
     use_clang_coverage = false
+
+    # Never build ash with asan. It is too slow for builders and causes
+    # tests to be flaky.
+    is_asan = false
+    is_lsan = false
+  }
+}
+
+# In an ash build, this toolchain is intended to be used as an alternate
+# toolchain to build lacros-Chrome in a subdirectory.
+# This is a test-only toolchain.
+clang_toolchain("lacros_clang_x64") {
+  toolchain_args = {
+    # This turns the toolchain into the "Lacros" build
+    current_os = "chromeos"
+    target_os = "chromeos"
+    current_cpu = current_cpu
+
+    # This turns on the LaCrOS-specific flag (chromeos_is_browser_only).
+    also_build_lacros_chrome = false
+    chromeos_is_browser_only = true
+    use_clang_coverage = false
+    dcheck_always_on = false
   }
 }
 
@@ -205,7 +236,7 @@ gcc_toolchain("x64") {
     current_os = "linux"
 
     # reclient does not support gcc.
-    use_rbe = false
+    use_remoteexec = false
     is_clang = false
   }
 }
@@ -240,7 +271,7 @@ gcc_toolchain("mipsel") {
     current_os = "linux"
 
     # reclient does not support gcc.
-    use_rbe = false
+    use_remoteexec = false
     is_clang = false
     use_goma = false
   }
@@ -262,12 +293,21 @@ gcc_toolchain("mips64el") {
     current_os = "linux"
 
     # reclient does not support gcc.
-    use_rbe = false
+    use_remoteexec = false
     is_clang = false
     use_goma = false
   }
 }
 
+clang_toolchain("clang_riscv64") {
+  enable_linker_map = true
+  toolchain_args = {
+    current_cpu = "riscv64"
+    current_os = "linux"
+    is_clang = true
+  }
+}
+
 gcc_toolchain("riscv64") {
   toolprefix = "riscv64-linux-gnu"
 
@@ -308,7 +348,7 @@ gcc_toolchain("s390x") {
     current_os = "linux"
 
     # reclient does not support gcc.
-    use_rbe = false
+    use_remoteexec = false
     is_clang = false
   }
 }
@@ -327,7 +367,7 @@ gcc_toolchain("ppc64") {
     current_os = "linux"
 
     # reclient does not support gcc.
-    use_rbe = false
+    use_remoteexec = false
     is_clang = false
   }
 }
@@ -348,7 +388,7 @@ gcc_toolchain("mips") {
     current_os = "linux"
 
     # reclient does not support gcc.
-    use_rbe = false
+    use_remoteexec = false
     is_clang = false
   }
 }
@@ -369,7 +409,32 @@ gcc_toolchain("mips64") {
     current_os = "linux"
 
     # reclient does not support gcc.
-    use_rbe = false
+    use_remoteexec = false
+    is_clang = false
+  }
+}
+
+clang_toolchain("clang_loong64") {
+  toolchain_args = {
+    current_cpu = "loong64"
+    current_os = "linux"
+  }
+}
+
+gcc_toolchain("loong64") {
+  toolprefix = "loongarch64-unknown-linux-gnu"
+
+  cc = "${toolprefix}-gcc"
+  cxx = "${toolprefix}-g++"
+
+  readelf = "${toolprefix}-readelf"
+  nm = "${toolprefix}-nm"
+  ar = "${toolprefix}-ar"
+  ld = cxx
+
+  toolchain_args = {
+    current_cpu = "loong64"
+    current_os = "linux"
     is_clang = false
   }
 }
diff --git a/build/toolchain/linux/unbundle/BUILD.gn b/build/toolchain/linux/unbundle/BUILD.gn
index 4719d540b0ca..a091491236bb 100644
--- a/build/toolchain/linux/unbundle/BUILD.gn
+++ b/build/toolchain/linux/unbundle/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
+# Copyright 2017 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/build/toolchain/linux/unbundle/README.md b/build/toolchain/linux/unbundle/README.md
index 17b93c9fdee1..73a81a33a2b6 100644
--- a/build/toolchain/linux/unbundle/README.md
+++ b/build/toolchain/linux/unbundle/README.md
@@ -28,7 +28,7 @@ custom_toolchain="//build/toolchain/linux/unbundle:default"
 host_toolchain="//build/toolchain/linux/unbundle:default"
 ```
 
-See [more docs on GN](https://gn.googlesource.com/gn/+/master/docs/quick_start.md).
+See [more docs on GN](https://gn.googlesource.com/gn/+/main/docs/quick_start.md).
 
 To cross-compile (not fully tested), add the following:
diff --git a/build/toolchain/mac/BUILD.gn b/build/toolchain/mac/BUILD.gn
index 1d0f2a2c2286..73f44ae18341 100644
--- a/build/toolchain/mac/BUILD.gn
+++ b/build/toolchain/mac/BUILD.gn
@@ -1,9 +1,11 @@
-# Copyright 2021 The Chromium Authors. All rights reserved.
+# Copyright 2021 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import("//build/config/mac/mac_sdk.gni")
+import("//build/config/v8_target_cpu.gni")
 import("//build/toolchain/apple/toolchain.gni")
+import("//build_overrides/build.gni")
 
 # Specialisation of the apple_toolchain template to declare the toolchain
 # and its tools to build targets for the macOS platform.
@@ -21,14 +23,26 @@ template("mac_toolchain") {
       current_os = "mac"
 
       if (target_os == "ios") {
+        # Use LLD for the host part of a chrome/ios build.
+        use_lld = true
+
+        # Override `is_component_build` for the host toolchain.
+        # See https://crbug.com/gn/286 for details on why this is
+        # required.
+        is_component_build = is_debug
+
+        # Defined in //base; it would trigger a warning if the build doesn't
+        # depend on it.
+        if (build_with_chromium) {
+          # Cronet disables this because it targets 32-bit;
+          # enable it unconditionally for the host toolchain.
+          use_allocator_shim = true
+        }
+
         # TODO(crbug.com/753445): the use_sanitizer_coverage arg is currently
         # not supported by the Chromium mac_clang_x64 toolchain on iOS
         # distribution.
         use_sanitizer_coverage = false
-
-        # Do not use Xcode version of clang when building macOS tools for the
-        # host even if this is the version used to build for the iOS target.
-        use_xcode_clang = false
       }
     }
   }
@@ -61,39 +75,42 @@ mac_toolchain("clang_x86") {
 mac_toolchain("clang_x86_v8_arm") {
   toolchain_args = {
     current_cpu = "x86"
-
-    if (defined(v8_current_cpu)) {
-      v8_current_cpu = "arm"
-    }
+    v8_current_cpu = "arm"
   }
 }
 
 mac_toolchain("clang_x86_v8_mipsel") {
   toolchain_args = {
     current_cpu = "x86"
-
-    if (defined(v8_current_cpu)) {
-      v8_current_cpu = "mipsel"
-    }
+    v8_current_cpu = "mipsel"
   }
 }
 
 mac_toolchain("clang_x64_v8_arm64") {
   toolchain_args = {
     current_cpu = "x64"
-
-    if (defined(v8_current_cpu)) {
-      v8_current_cpu = "arm64"
-    }
+    v8_current_cpu = "arm64"
   }
 }
 
 mac_toolchain("clang_x64_v8_mips64el") {
   toolchain_args = {
     current_cpu = "x64"
+    v8_current_cpu = "mips64el"
+  }
+}
 
-    if (defined(v8_current_cpu)) {
-      v8_current_cpu = "mips64el"
-    }
+mac_toolchain("clang_arm64_v8_x64") {
+  toolchain_args = {
+    current_cpu = "arm64"
+    v8_current_cpu = "x64"
+  }
+}
+
+# Needed to run v8 on the host during an arm64 -> x86_64 cross-build
+mac_toolchain("clang_arm64_v8_arm64") {
+  toolchain_args = {
+    current_cpu = "arm64"
+    v8_current_cpu = "arm64"
   }
 }
diff --git a/build/toolchain/nacl/BUILD.gn b/build/toolchain/nacl/BUILD.gn
index 66a88bedb5e2..00f718976435 100644
--- a/build/toolchain/nacl/BUILD.gn
+++ b/build/toolchain/nacl/BUILD.gn
@@ -18,12 +18,14 @@ revisions = exec_script("//native_client/build/get_toolchain_revision.py",
                           "nacl_x86_glibc",
                           "nacl_arm_glibc",
                           "pnacl_newlib",
+                          "saigo_newlib",
                         ],
                         "trim list lines")
 nacl_x86_glibc_rev = revisions[0]
 nacl_arm_glibc_rev = revisions[1]
 
 pnacl_newlib_rev = revisions[2]
+saigo_newlib_rev = revisions[3]
 
 if (host_os == "win") {
   toolsuffix = ".exe"
@@ -49,7 +51,7 @@ if (host_os == "win") {
 # When the compilers are run via goma, rbe or ccache rather than directly by
 # GN/Ninja, the rbe/goma/ccache wrapper handles .bat files but gets confused
 # by being given the scriptprefix.
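 # For example (illustrative; the actual .bat names vary): with a goma wrapper
 # the compiler is launched as "gomacc x86_64-nacl-gcc.bat ..." rather than
 # "cmd /c call x86_64-nacl-gcc.bat ...", so the script prefix must be dropped.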
- use_rbe = false + use_remoteexec = false is_clang = false is_nacl_glibc = true use_lld = false @@ -224,8 +217,8 @@ template("nacl_irt_toolchain") { toolchain_cpu = target_name assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple") - toolchain_package = "pnacl_newlib" - toolchain_revision = pnacl_newlib_rev + toolchain_package = "saigo_newlib" + toolchain_revision = saigo_newlib_rev toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" + invoker.toolchain_tuple + "-", root_build_dir) @@ -261,6 +254,7 @@ template("nacl_irt_toolchain") { current_cpu = toolchain_cpu is_clang = true use_lld = false + is_nacl_saigo = true } # TODO(ncbray): depend on link script diff --git a/build/toolchain/nacl_toolchain.gni b/build/toolchain/nacl_toolchain.gni index 100c0b09658d..bebdbd80c968 100644 --- a/build/toolchain/nacl_toolchain.gni +++ b/build/toolchain/nacl_toolchain.gni @@ -49,23 +49,25 @@ template("nacl_toolchain") { # We do not support component builds with the NaCl toolchains. is_component_build = false - # We do not support tcmalloc in the NaCl toolchains. - use_allocator = "none" - use_allocator_shim = false - # We do not support clang profiling in the NaCl toolchains. use_clang_profiling = false use_clang_coverage = false coverage_instrumentation_input_file = "" - if (use_rbe) { + if (use_remoteexec) { if (is_win) { - rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_windows_nacl.cfg" + rbe_cc_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_windows.cfg" + } else if (is_mac) { + rbe_cc_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_mac.cfg" } else { # TODO(ukai): non linux? - rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_linux_nacl.cfg" + rbe_cc_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_linux.cfg" } } + + if (use_remoteexec_links) { + rbe_link_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_linux_link.cfg" + } } } } diff --git a/build/toolchain/rbe.gni b/build/toolchain/rbe.gni index cf1848c9c0a7..cd86e22c01ef 100644 --- a/build/toolchain/rbe.gni +++ b/build/toolchain/rbe.gni @@ -3,30 +3,60 @@ # The directory where the re-client tooling binaries are. rbe_bin_dir = rebase_path("//buildtools/reclient", root_build_dir) -# The directory where the re-client configuration files are. -rbe_cfg_dir = rebase_path("//buildtools/reclient_cfgs", root_build_dir) +declare_args() { + # Execution root - this should be the root of the source tree. + # This is defined here instead of in the config file because + # this will vary depending on where the user has placed the + # chromium source on their system. + rbe_exec_root = rebase_path("//") -# RBE Execution root - this should be the root of the source tree. -# This is defined here instead of in the config file because -# this will vary depending on where the user has placed the -# chromium source on their system. -rbe_exec_root = rebase_path("//") + # Set to true to enable remote compilation using reclient. + use_remoteexec = false -declare_args() { - # Set to true to enable remote compilation using RBE. - use_rbe = false + # Set to true to enable remote linking using reclient. + use_remoteexec_links = false - # Set to the path of the RBE reclient configuration file. - rbe_cc_cfg_file = "" + # The directory where the re-client configuration files are. + rbe_cfg_dir = rebase_path("//buildtools/reclient_cfgs", root_build_dir) } -# Configuration file selection based on operating system. -if (is_linux || is_android) { - rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_linux.cfg" +declare_args() { + # Set to the path of the RBE reclient configuration files. 
+  # Configuration file selection based on operating system.
+  if (is_linux || is_android || is_chromeos || is_fuchsia) {
+    rbe_py_cfg_file = "${rbe_cfg_dir}/python/rewrapper_linux.cfg"
+    rbe_cc_cfg_file =
+        "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_linux.cfg"
+    rbe_link_cfg_file =
+        "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_linux_link.cfg"
+  } else if (is_win) {
+    rbe_py_cfg_file = "${rbe_cfg_dir}/python/rewrapper_windows.cfg"
+    rbe_cc_cfg_file =
+        "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_windows.cfg"
+    rbe_link_cfg_file = ""
+  } else if (is_mac || is_ios) {
+    rbe_py_cfg_file = "${rbe_cfg_dir}/python/rewrapper_mac.cfg"
+    rbe_cc_cfg_file = "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_mac.cfg"
+    rbe_link_cfg_file = ""
+  } else {
+    rbe_link_cfg_file = ""
+    rbe_py_cfg_file = ""
+    rbe_cc_cfg_file = ""
+  }
+
+  # Set to the path of the RBE reclient wrapper for ChromeOS.
+  rbe_cros_cc_wrapper = "${rbe_bin_dir}/rewrapper"
 }
+
 if (is_win) {
-  rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_windows.cfg"
+  if (use_remoteexec_links) {
+    print("For now, remote linking is not available for Windows.")
+    use_remoteexec_links = false
+  }
 }
 if (is_mac || is_ios) {
-  rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_mac.cfg"
+  if (use_remoteexec_links) {
+    print("For now, remote linking is not available on Macs.")
+    use_remoteexec_links = false
+  }
 }
diff --git a/build/toolchain/toolchain.gni b/build/toolchain/toolchain.gni
index a1e250558631..d32d7d0e9dd9 100644
--- a/build/toolchain/toolchain.gni
+++ b/build/toolchain/toolchain.gni
@@ -1,4 +1,4 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
+# Copyright 2015 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -17,19 +17,19 @@ declare_args() {
   # `gclient runhooks` is run as well.
   llvm_force_head_revision = false
 
-  # Compile with Xcode version of clang instead of hermetic version shipped
-  # with the build. Used to be used iOS for official builds, but is now off by
-  # default for all configurations.
-  use_xcode_clang = false
-
   # Used for binary size analysis.
   generate_linker_map = is_android && is_official_build
+
+  # Whether this toolchain should avoid building any sanitizer support
+  # because it's a host toolchain where we aim to make shared objects that may
+  # be loaded by prebuilt binaries without sanitizer support.
+  toolchain_disables_sanitizers = false
 }
 
 if (generate_linker_map) {
-  assert(is_official_build || is_chromecast,
+  assert(is_official_build || is_castos || is_cast_android,
          "Linker map files should only be generated when is_official_build = " +
-             "true or is_chromecast = true")
+             "true or is_castos = true or is_cast_android = true")
   assert(current_os == "android" || current_os == "linux" ||
              target_os == "android" || target_os == "linux" ||
              target_os == "chromeos",
@@ -38,15 +38,9 @@ if (generate_linker_map) {
 }
 
 declare_args() {
-  clang_version = "13.0.0"
+  clang_version = "17"
 }
 
-# Check target_os here instead of is_ios as this file is loaded for secondary
-# toolchain (host toolchain in particular) but the argument is the same for
-# all toolchains.
-assert(is_starboard || !use_xcode_clang || target_os == "ios",
-       "Using Xcode's clang is only supported in iOS builds")
-
 # Extension for shared library files (including leading dot).
 if (is_apple) {
   shlib_extension = ".dylib"
@@ -90,12 +84,16 @@ if (host_os == "win") {
   stamp_command = "cmd /c type nul > \"{{output}}\""
   copy_command =
-      "$python_path $_tool_wrapper_path recursive-mirror {{source}} {{output}}"
+      "\"$python_path\" $_tool_wrapper_path recursive-mirror {{source}} {{output}}"
 } else {
   stamp_command = "touch {{output}}"
   copy_command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
 }
 
+if (!defined(toolchain_disables_sanitizers)) {
+  toolchain_disables_sanitizers = false
+}
+
 # This variable is true if the current toolchain is one of the target
 # toolchains, i.e. a toolchain which is being used to build the main Chrome
 # binary. This generally means "not the host toolchain", but in the case where
diff --git a/build/toolchain/whole_archive.py b/build/toolchain/whole_archive.py
new file mode 100644
index 000000000000..aeeb0ddc6997
--- /dev/null
+++ b/build/toolchain/whole_archive.py
@@ -0,0 +1,51 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+
+def wrap_with_whole_archive(command):
+  """Modify and return `command` such that -LinkWrapper,add-whole-archive=X
+  becomes a linking inclusion X (-lX) but wrapped in whole-archive
+  modifiers."""
+
+  # We want to link rlibs as --whole-archive if they are part of a unit test
+  # target. This is determined by switch `-LinkWrapper,add-whole-archive`.
+  #
+  # TODO(danakj): If the linking command line gets too large we could move
+  # {{rlibs}} into the rsp file, but then this script needs to modify the rsp
+  # file instead of the command line.
+  def extract_libname(s):
+    m = re.match(r'-LinkWrapper,add-whole-archive=(.+)', s)
+    return m.group(1)
+
+  # The set of libraries we want to apply `--whole-archive` to.
+  whole_archive_libs = [
+      extract_libname(x) for x in command
+      if x.startswith("-LinkWrapper,add-whole-archive=")
+  ]
+
+  # Remove the arguments meant for consumption by this LinkWrapper script.
+  command = [x for x in command if not x.startswith("-LinkWrapper,")]
+
+  def has_any_suffix(string, suffixes):
+    for suffix in suffixes:
+      if string.endswith(suffix):
+        return True
+    return False
+
+  def wrap_libs_with(command, libnames, before, after):
+    out = []
+    for arg in command:
+      # The arg is a full path to a library; check whether the library name
+      # (a suffix of the full arg) is one of `libnames`.
+      if has_any_suffix(arg, libnames):
+        out.extend([before, arg, after])
+      else:
+        out.append(arg)
+    return out
+
+  # Apply --whole-archive to the libraries that desire it.
+  return wrap_libs_with(command, whole_archive_libs, "-Wl,--whole-archive",
+                        "-Wl,--no-whole-archive")
diff --git a/build/toolchain/win/BUILD.gn b/build/toolchain/win/BUILD.gn
index a8137526d343..fe0979038df8 100644
--- a/build/toolchain/win/BUILD.gn
+++ b/build/toolchain/win/BUILD.gn
@@ -1,18 +1,11 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-import("//build/config/clang/clang.gni")
-import("//build/config/compiler/compiler.gni")
-import("//build/config/sanitizers/sanitizers.gni")
 import("//build/config/win/visual_studio_version.gni")
-import("//build/toolchain/cc_wrapper.gni")
-import("//build/toolchain/goma.gni")
-import("//build/toolchain/rbe.gni")
-import("//build/toolchain/toolchain.gni")
+import("//build/toolchain/win/toolchain.gni")
 
-# Should only be running on Windows.
-assert(is_win)
+assert(is_win, "Should only be running on Windows")
 
 # Setup the Visual Studio state.
 #
@@ -20,29 +13,6 @@ assert(is_win)
 # "environment.x86" and "environment.x64" to the build directory and return a
 # list to us.
 
-# This tool is used as a wrapper for various commands below.
-tool_wrapper_path = rebase_path("tool_wrapper.py", root_build_dir)
-
-if (use_rbe) {
-  goma_prefix = ""
-  rbe_prefix = "${rbe_bin_dir}/rewrapper -cfg=${rbe_cc_cfg_file} -exec_root=${rbe_exec_root} "
-  clang_prefix = rbe_prefix
-} else if (use_goma) {
-  if (host_os == "win") {
-    goma_prefix = "$goma_dir/gomacc.exe "
-  } else {
-    goma_prefix = "$goma_dir/gomacc "
-  }
-  clang_prefix = goma_prefix
-} else {
-  goma_prefix = ""
-  if (cc_wrapper != "") {
-    clang_prefix = cc_wrapper + " "
-  } else {
-    clang_prefix = ""
-  }
-}
-
 # Copy the VS runtime DLL for the default toolchain to the root build directory
 # so things will run.
 if (current_toolchain == default_toolchain) {
@@ -60,422 +30,6 @@ if (current_toolchain == default_toolchain) {
   ])
 }
 
-if (host_os == "win") {
-  clang_cl = "clang-cl.exe"
-} else {
-  clang_cl = "clang-cl"
-}
-
-# Parameters:
-#   environment: File name of environment file.
-#
-# You would also define a toolchain_args variable with at least these set:
-#   current_cpu: current_cpu to pass as a build arg
-#   current_os: current_os to pass as a build arg
-template("msvc_toolchain") {
-  toolchain(target_name) {
-    # When invoking this toolchain not as the default one, these args will be
-    # passed to the build. They are ignored when this is the default toolchain.
-    assert(defined(invoker.toolchain_args))
-    toolchain_args = {
-      if (defined(invoker.toolchain_args)) {
-        forward_variables_from(invoker.toolchain_args, "*")
-      }
-
-      # This value needs to be passed through unchanged.
-      host_toolchain = host_toolchain
-    }
-
-    # Make these apply to all tools below.
-    lib_switch = ""
-    lib_dir_switch = "/LIBPATH:"
-
-    # Object files go in this directory.
-    object_subdir = "{{target_out_dir}}/{{label_name}}"
-
-    env = invoker.environment
-
-    cl = invoker.cl
-
-    if (use_lld) {
-      if (host_os == "win") {
-        lld_link = "lld-link.exe"
-      } else {
-        lld_link = "lld-link"
-      }
-      prefix = rebase_path("$clang_base_path/bin", root_build_dir)
-
-      # lld-link includes a replacement for lib.exe that can produce thin
-      # archives and understands bitcode (for lto builds).
-      link = "$prefix/$lld_link"
-      if (host_os == "win") {
-        # Flip the slashes so that copy/paste of the commands works.
-        link = string_replace(link, "/", "\\")
-      }
-      lib = "$link /lib"
-      if (host_os != "win") {
-        # See comment adding --rsp-quoting to $cl above for more information.
-        link = "$link --rsp-quoting=posix"
-      }
-    } else {
-      lib = "lib.exe"
-      link = "link.exe"
-    }
-
-    # If possible, pass system includes as flags to the compiler. When that's
-    # not possible, load a full environment file (containing %INCLUDE% and
-    # %PATH%) -- e.g. 32-bit MSVS builds require %PATH% to be set and just
-    # passing in a list of include directories isn't enough.
-    if (defined(invoker.sys_include_flags)) {
-      env_wrapper = ""
-      sys_include_flags =
-          "${invoker.sys_include_flags} "  # Note trailing space.
-    } else {
-      # clang-cl doesn't need this env hoop, so omit it there.
-      assert((defined(toolchain_args.is_clang) && !toolchain_args.is_clang) ||
-             !is_clang)
-      env_wrapper = "ninja -t msvc -e $env -- "  # Note trailing space.
-      sys_include_flags = ""
-    }
-
-    # ninja does not have -t msvc on platforms other than Windows, and lld
-    # doesn't depend on mt.exe in PATH on non-Windows, so it's not needed
-    # there anyways.
-    if (host_os != "win") {
-      linker_wrapper = ""
-      sys_lib_flags = "${invoker.sys_lib_flags} "  # Note trailing space.
-    } else if (defined(invoker.sys_lib_flags)) {
-      # Invoke ninja as wrapper instead of tool wrapper, because python
-      # invocation requires higher cpu usage compared to ninja invocation, and
-      # the python wrapper is only needed to work around link.exe problems.
-      # TODO(thakis): Remove wrapper once lld-link can merge manifests without
-      # relying on mt.exe being in %PATH% on Windows, https://crbug.com/872740
-      linker_wrapper = "ninja -t msvc -e $env -- "  # Note trailing space.
-      sys_lib_flags = "${invoker.sys_lib_flags} "  # Note trailing space.
-    } else {
-      # Note trailing space:
-      linker_wrapper =
-          "$python_path $tool_wrapper_path link-wrapper $env False "
-      sys_lib_flags = ""
-    }
-
-    if (defined(toolchain_args.use_clang_coverage)) {
-      toolchain_use_clang_coverage = toolchain_args.use_clang_coverage
-    } else {
-      toolchain_use_clang_coverage = use_clang_coverage
-    }
-
-    if (toolchain_use_clang_coverage) {
-      assert(toolchain_args.is_clang,
-             "use_clang_coverage should only be used with Clang")
-      if (defined(toolchain_args.coverage_instrumentation_input_file)) {
-        toolchain_coverage_instrumentation_input_file =
-            toolchain_args.coverage_instrumentation_input_file
-      } else {
-        toolchain_coverage_instrumentation_input_file =
-            coverage_instrumentation_input_file
-      }
-
-      coverage_wrapper =
-          rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
-                      root_build_dir)
-      coverage_wrapper = coverage_wrapper + " --target-os=" + target_os
-      if (toolchain_coverage_instrumentation_input_file != "") {
-        coverage_wrapper =
-            coverage_wrapper + " --files-to-instrument=" +
-            rebase_path(toolchain_coverage_instrumentation_input_file,
-                        root_build_dir)
-      }
-      coverage_wrapper = "$python_path " + coverage_wrapper + " "
-    } else {
-      coverage_wrapper = ""
-    }
-
-    if (toolchain_args.is_clang) {
-      # This flag omits system includes from /showIncludes output, to reduce
-      # the amount of data to parse and store in .ninja_deps. We do this on
-      # non-Windows too, and already make sure rebuilds after win sdk /
-      # libc++ / clang header updates happen via changing command-line flags.
-      show_includes = "/showIncludes:user"
-    } else {
-      show_includes = "/showIncludes"
-    }
-
-    tool("cc") {
-      precompiled_header_type = "msvc"
-      pdbname = "{{target_out_dir}}/{{label_name}}_c.pdb"
-
-      # Label names may have spaces in them so the pdbname must be quoted. The
-      # source and output don't need to be quoted because GN knows they're a
-      # full file name and will quote automatically when necessary.
-      depsformat = "msvc"
-      description = "CC {{output}}"
-      outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
-
-      command = "$coverage_wrapper$env_wrapper$cl /nologo $show_includes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} /c {{source}} /Fo{{output}} /Fd\"$pdbname\""
-    }
-
-    tool("cxx") {
-      precompiled_header_type = "msvc"
-
-      # The PDB name needs to be different between C and C++ compiled files.
-      pdbname = "{{target_out_dir}}/{{label_name}}_cc.pdb"
-
-      # See comment in CC tool about quoting.
-      depsformat = "msvc"
-      description = "CXX {{output}}"
-      outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
-
-      command = "$coverage_wrapper$env_wrapper$cl /nologo $show_includes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} /c {{source}} /Fo{{output}} /Fd\"$pdbname\""
-    }
-
-    tool("rc") {
-      command = "$python_path $tool_wrapper_path rc-wrapper $env rc.exe /nologo $sys_include_flags{{defines}} {{include_dirs}} /fo{{output}} {{source}}"
-      depsformat = "msvc"
-      outputs = [ "$object_subdir/{{source_name_part}}.res" ]
-      description = "RC {{output}}"
-    }
-
-    tool("asm") {
-      is_msvc_assembler = true
-
-      if (toolchain_args.current_cpu == "arm64") {
-        if (is_clang) {
-          prefix = rebase_path("$clang_base_path/bin", root_build_dir)
-          ml = "${clang_prefix}${prefix}/${clang_cl} --target=arm64-windows"
-          if (host_os == "win") {
-            # Flip the slashes so that copy/paste of the command works.
-            ml = string_replace(ml, "/", "\\")
-          }
-          ml += " -c -o{{output}}"
-          is_msvc_assembler = false
-        } else {
-          # Only affects Arm builds with is_clang = false, implemented for
-          # building V8 for Windows on Arm systems with the MSVC toolchain.
-          ml = "armasm64.exe"
-        }
-      } else {
-        # x86/x64 builds always use the MSVC assembler.
-        if (toolchain_args.current_cpu == "x64") {
-          ml = "ml64.exe"
-        } else {
-          ml = "ml.exe"
-        }
-      }
-
-      if (is_msvc_assembler) {
-        ml += " /nologo /Fo{{output}}"
-
-        # Suppress final-stage linking on x64/x86 builds. (Armasm64 does not
-        # require /c because it doesn't support linking.)
-        if (toolchain_args.current_cpu != "arm64") {
-          ml += " /c"
-        }
-        if (use_lld) {
-          # Wrap ml(64).exe with a script that makes its output deterministic.
-          # It's lld-only because the script zaps the obj timestamp, which
-          # link.exe /incremental looks at.
-          # TODO(https://crbug.com/762167): If we end up writing an llvm-ml64,
-          # make sure it has deterministic output (maybe with /Brepro or
-          # something) and remove this wrapper.
-          ml_py = rebase_path("ml.py", root_build_dir)
-          ml = "$python_path $ml_py $ml"
-        }
-      }
-      if (toolchain_args.current_cpu != "arm64" || is_clang) {
-        command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} {{source}}"
-      } else {
-        # armasm64.exe does not support definitions passed via the command line.
-        # (Fortunately, they're not needed for compiling the V8 snapshot, which
-        # is the only time this assembler is required.)
-        command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{include_dirs}} {{asmflags}} {{source}}"
-      }
-
-      description = "ASM {{output}}"
-      outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
-    }
-
-    tool("alink") {
-      rspfile = "{{output}}.rsp"
-      command = "$linker_wrapper$lib /OUT:{{output}} /nologo ${sys_lib_flags}{{arflags}} @$rspfile"
-      description = "LIB {{output}}"
-      outputs = [
-        # Ignore {{output_extension}} and always use .lib, there's no reason to
-        # allow targets to override this extension on Windows.
- "{{output_dir}}/{{target_output_name}}.lib", - ] - default_output_extension = ".lib" - default_output_dir = "{{target_out_dir}}" - - # The use of inputs_newline is to work around a fixed per-line buffer - # size in the linker. - rspfile_content = "{{inputs_newline}}" - } - - tool("solink") { - # E.g. "foo.dll": - dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" - libname = "${dllname}.lib" # e.g. foo.dll.lib - pdbname = "${dllname}.pdb" - rspfile = "${dllname}.rsp" - pool = "//build/toolchain:link_pool($default_toolchain)" - - command = "$linker_wrapper$link /OUT:$dllname /nologo ${sys_lib_flags}/IMPLIB:$libname /DLL /PDB:$pdbname @$rspfile" - - default_output_extension = ".dll" - default_output_dir = "{{root_out_dir}}" - description = "LINK(DLL) {{output}}" - outputs = [ - dllname, - libname, - pdbname, - ] - link_output = libname - depend_output = libname - runtime_outputs = [ - dllname, - pdbname, - ] - - # Since the above commands only updates the .lib file when it changes, ask - # Ninja to check if the timestamp actually changed to know if downstream - # dependencies should be recompiled. - restat = true - - # The use of inputs_newline is to work around a fixed per-line buffer - # size in the linker. - rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}" - } - - tool("solink_module") { - # E.g. "foo.dll": - dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" - pdbname = "${dllname}.pdb" - rspfile = "${dllname}.rsp" - pool = "//build/toolchain:link_pool($default_toolchain)" - - command = "$linker_wrapper$link /OUT:$dllname /nologo ${sys_lib_flags}/DLL /PDB:$pdbname @$rspfile" - - default_output_extension = ".dll" - default_output_dir = "{{root_out_dir}}" - description = "LINK_MODULE(DLL) {{output}}" - outputs = [ - dllname, - pdbname, - ] - runtime_outputs = outputs - - # The use of inputs_newline is to work around a fixed per-line buffer - # size in the linker. - rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}" - } - - tool("link") { - exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}" - pdbname = "$exename.pdb" - rspfile = "$exename.rsp" - pool = "//build/toolchain:link_pool($default_toolchain)" - - command = "$linker_wrapper$link /OUT:$exename /nologo ${sys_lib_flags} /PDB:$pdbname @$rspfile" - - default_output_extension = ".exe" - default_output_dir = "{{root_out_dir}}" - description = "LINK {{output}}" - outputs = [ - exename, - pdbname, - ] - runtime_outputs = outputs - - # The use of inputs_newline is to work around a fixed per-line buffer - # size in the linker. - rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}" - } - - # These two are really entirely generic, but have to be repeated in - # each toolchain because GN doesn't allow a template to be used here. - # See //build/toolchain/toolchain.gni for details. 
- tool("stamp") { - command = stamp_command - description = stamp_description - pool = "//build/toolchain:action_pool($default_toolchain)" - } - tool("copy") { - command = copy_command - description = copy_description - pool = "//build/toolchain:action_pool($default_toolchain)" - } - - tool("action") { - pool = "//build/toolchain:action_pool($default_toolchain)" - } - } -} - -template("win_toolchains") { - assert(defined(invoker.toolchain_arch)) - toolchain_arch = invoker.toolchain_arch - - win_toolchain_data = exec_script("setup_toolchain.py", - [ - visual_studio_path, - windows_sdk_path, - visual_studio_runtime_dirs, - "win", - toolchain_arch, - "environment." + toolchain_arch, - ], - "scope") - - # The toolchain using MSVC only makes sense when not doing cross builds. - # Chromium exclusively uses the win_clang_ toolchain below, but V8 and - # WebRTC still use this MSVC toolchain in some cases. - if (host_os == "win") { - msvc_toolchain(target_name) { - environment = "environment." + toolchain_arch - cl = "${goma_prefix}\"${win_toolchain_data.vc_bin_dir}/cl.exe\"" - - toolchain_args = { - if (defined(invoker.toolchain_args)) { - forward_variables_from(invoker.toolchain_args, "*") - } - is_clang = false - use_clang_coverage = false - current_os = "win" - current_cpu = toolchain_arch - } - } - } - - msvc_toolchain("win_clang_" + target_name) { - environment = "environment." + toolchain_arch - prefix = rebase_path("$clang_base_path/bin", root_build_dir) - cl = "${clang_prefix}$prefix/${clang_cl}" - _clang_lib_dir = - rebase_path("$clang_base_path/lib/clang/$clang_version/lib/windows", - root_build_dir) - if (host_os == "win") { - # Flip the slashes so that copy/paste of the command works. - cl = string_replace(cl, "/", "\\") - - # And to match the other -libpath flags. - _clang_lib_dir = string_replace(_clang_lib_dir, "/", "\\") - } - - sys_include_flags = "${win_toolchain_data.include_flags_imsvc}" - sys_lib_flags = - "-libpath:$_clang_lib_dir ${win_toolchain_data.libpath_flags}" - - toolchain_args = { - if (defined(invoker.toolchain_args)) { - forward_variables_from(invoker.toolchain_args, "*") - } - is_clang = true - current_os = "win" - current_cpu = toolchain_arch - } - } -} - if (target_cpu == "x86" || target_cpu == "x64") { win_toolchains("x86") { toolchain_arch = "x86" @@ -511,20 +65,26 @@ win_toolchains("nacl_win64") { if (target_os == "winuwp") { assert(target_cpu == "x64" || target_cpu == "x86" || target_cpu == "arm" || target_cpu == "arm64") - store_cpu_toolchain_data = exec_script("setup_toolchain.py", - [ - visual_studio_path, - windows_sdk_path, - visual_studio_runtime_dirs, - target_os, - target_cpu, - "environment.store_" + target_cpu, - ], - "scope") + + # Note that //build/toolchain/win/win_toolchain_data.gni collects the output + # of setup_toolchain.py, however it's not compatible with the UWP toolchain, + # as the UWP toolchain requires the `environment.store_$CPU` variable, instead + # of the usual `environment.$CPU`. 
+ store_cpu_toolchain_data = + exec_script("//build/toolchain/win/setup_toolchain.py", + [ + visual_studio_path, + windows_sdk_path, + visual_studio_runtime_dirs, + target_os, + target_cpu, + "environment.store_" + target_cpu, + ], + "scope") msvc_toolchain("uwp_" + target_cpu) { environment = "environment.store_" + target_cpu - cl = "${goma_prefix}\"${store_cpu_toolchain_data.vc_bin_dir}/cl.exe\"" + cl = "\"${store_cpu_toolchain_data.vc_bin_dir}/cl.exe\"" toolchain_args = { current_os = "winuwp" current_cpu = target_cpu diff --git a/build/toolchain/win/midl.gni b/build/toolchain/win/midl.gni index 9f701428509e..1f3ea409da12 100644 --- a/build/toolchain/win/midl.gni +++ b/build/toolchain/win/midl.gni @@ -1,4 +1,4 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -64,11 +64,12 @@ template("midl") { } if (defined(invoker.generated_dir)) { - generated_dir = rebase_path(invoker.generated_dir) + generated_dir = rebase_path(invoker.generated_dir, root_build_dir) } else { # midl.py expects 'gen' to be replaced with 'midl'. - generated_dir = rebase_path("//third_party/win_build_output") + "/midl/" + - rebase_path(out_dir, root_gen_dir) + generated_dir = + rebase_path("//third_party/win_build_output", root_build_dir) + + "/midl/" + rebase_path(out_dir, root_gen_dir) } if (defined(invoker.dynamic_guids)) { diff --git a/build/toolchain/win/midl.py b/build/toolchain/win/midl.py index cfb4220133c8..9c0d0a4ccff6 100644 --- a/build/toolchain/win/midl.py +++ b/build/toolchain/win/midl.py @@ -1,13 +1,11 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from __future__ import division -from __future__ import print_function import array import difflib -import distutils.dir_util import filecmp import io import operator @@ -264,8 +262,6 @@ def run_midl(args, env_dict): stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = popen.communicate() - if popen.returncode != 0: - return popen.returncode, midl_output_dir # Filter junk out of stdout, and write filtered versions. Output we want # to filter is pairs of lines that look like this: @@ -279,6 +275,9 @@ def run_midl(args, env_dict): if not line.startswith(prefixes) and line not in processing: print(line) + if popen.returncode != 0: + return popen.returncode, midl_output_dir + for f in os.listdir(midl_output_dir): ZapTimestamp(os.path.join(midl_output_dir, f)) diff --git a/build/toolchain/win/ml.py b/build/toolchain/win/ml.py index 6a1b6e577e0d..8cc2c9e1ea42 100755 --- a/build/toolchain/win/ml.py +++ b/build/toolchain/win/ml.py @@ -1,8 +1,7 @@ -#!/usr/bin/env python -# Copyright 2018 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. - """Wraps ml.exe or ml64.exe and postprocesses the output to be deterministic. Sets timestamp in .obj file to 0, hence incompatible with link.exe /incremental. 
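As background for the ml.py hunks that follow: the determinism trick described in the docstring above works because the COFF file header begins with two 2-byte fields (Machine, NumberOfSections), so the 4-byte little-endian TimeDateStamp always sits at byte offset 4. A minimal standalone sketch of just that zeroing step, under those assumptions and with a hypothetical foo.obj path (ml.py itself does more, e.g. stripping the .debug$S section, as the hunks below show):

import struct

def zap_coff_timestamp(objdata):
  # TimeDateStamp is the third COFF header field: 2 bytes ('H', Machine)
  # + 2 bytes ('H', NumberOfSections) puts it at offset 4. Zero it in place.
  struct.pack_into('<I', objdata, 4, 0)

with open('foo.obj', 'rb') as f:  # hypothetical path, for illustration only
  data = bytearray(f.read())
zap_coff_timestamp(data)
with open('foo.obj', 'wb') as f:
  f.write(data)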
@@ -18,6 +17,7 @@ class Struct(object): """A thin wrapper around the struct module that returns a namedtuple""" + def __init__(self, name, *args): """Pass the name of the return type, and then an interleaved list of format strings as used by the struct module and of field names.""" @@ -60,37 +60,25 @@ def MakeDeterministic(objdata): objdata = array.array('b', objdata) # Writable, e.g. via struct.pack_into. # Read coff header. - COFFHEADER = Struct('COFFHEADER', - 'H', 'Machine', - 'H', 'NumberOfSections', - 'I', 'TimeDateStamp', - 'I', 'PointerToSymbolTable', - 'I', 'NumberOfSymbols', - - 'H', 'SizeOfOptionalHeader', - 'H', 'Characteristics') + COFFHEADER = Struct('COFFHEADER', 'H', 'Machine', 'H', 'NumberOfSections', + 'I', 'TimeDateStamp', 'I', 'PointerToSymbolTable', 'I', + 'NumberOfSymbols', 'H', 'SizeOfOptionalHeader', 'H', + 'Characteristics') coff_header = COFFHEADER.unpack_from(objdata) assert coff_header.SizeOfOptionalHeader == 0 # Only set for binaries. # Read section headers following coff header. - SECTIONHEADER = Struct('SECTIONHEADER', - '8s', 'Name', - 'I', 'VirtualSize', - 'I', 'VirtualAddress', - - 'I', 'SizeOfRawData', - 'I', 'PointerToRawData', - 'I', 'PointerToRelocations', - 'I', 'PointerToLineNumbers', - - 'H', 'NumberOfRelocations', - 'H', 'NumberOfLineNumbers', - 'I', 'Characteristics') + SECTIONHEADER = Struct('SECTIONHEADER', '8s', 'Name', 'I', 'VirtualSize', 'I', + 'VirtualAddress', 'I', 'SizeOfRawData', 'I', + 'PointerToRawData', 'I', 'PointerToRelocations', 'I', + 'PointerToLineNumbers', 'H', 'NumberOfRelocations', + 'H', 'NumberOfLineNumbers', 'I', 'Characteristics') section_headers = [] debug_section_index = -1 for i in range(0, coff_header.NumberOfSections): - section_header = SECTIONHEADER.unpack_from( - objdata, offset=COFFHEADER.size() + i * SECTIONHEADER.size()) + section_header = SECTIONHEADER.unpack_from(objdata, + offset=COFFHEADER.size() + + i * SECTIONHEADER.size()) assert not section_header[0].startswith(b'/') # Support short names only. section_headers.append(section_header) @@ -137,7 +125,8 @@ def MakeDeterministic(objdata): # Make sure the symbol table (and hence, string table) appear after the last # section: - assert (coff_header.PointerToSymbolTable >= + assert ( + coff_header.PointerToSymbolTable >= section_headers[-1].PointerToRawData + section_headers[-1].SizeOfRawData) # The symbol table contains a symbol for the no-longer-present .debug$S @@ -150,14 +139,20 @@ def MakeDeterministic(objdata): # - relocations # - line number records (never present) # - one aux symbol entry (IMAGE_SYM_CLASS_CLR_TOKEN; not present in ml output) - SYM = Struct('SYM', - '8s', 'Name', - 'I', 'Value', - 'h', 'SectionNumber', # Note: Signed! - 'H', 'Type', - - 'B', 'StorageClass', - 'B', 'NumberOfAuxSymbols') + SYM = Struct( + 'SYM', + '8s', + 'Name', + 'I', + 'Value', + 'h', + 'SectionNumber', # Note: Signed! + 'H', + 'Type', + 'B', + 'StorageClass', + 'B', + 'NumberOfAuxSymbols') i = 0 debug_sym = -1 while i < coff_header.NumberOfSymbols: @@ -192,10 +187,8 @@ def MakeDeterministic(objdata): # Update symbol table indices in relocations. # There are a few processor types that have one or two relocation types # where SymbolTableIndex has a different meaning, but not for x86. 
- REL = Struct('REL', - 'I', 'VirtualAddress', - 'I', 'SymbolTableIndex', - 'H', 'Type') + REL = Struct('REL', 'I', 'VirtualAddress', 'I', 'SymbolTableIndex', 'H', + 'Type') for header in section_headers[0:debug_section_index]: for j in range(0, header.NumberOfRelocations): rel_offset = header.PointerToRelocations + j * REL.size() @@ -211,8 +204,9 @@ def MakeDeterministic(objdata): # Now that all indices are updated, remove the symbol table entry referring to # .debug$S and its aux entry. - del objdata[coff_header.PointerToSymbolTable + debug_sym * SYM.size(): - coff_header.PointerToSymbolTable + (debug_sym + 2) * SYM.size()] + del objdata[coff_header.PointerToSymbolTable + + debug_sym * SYM.size():coff_header.PointerToSymbolTable + + (debug_sym + 2) * SYM.size()] # Now we know that it's safe to write out the input data, with just the # timestamp overwritten to 0, the last section header cut out (and the @@ -233,8 +227,9 @@ def MakeDeterministic(objdata): header = Subtract(header, PointerToRelocations=SECTIONHEADER.size()) if header.NumberOfLineNumbers: header = Subtract(header, PointerToLineNumbers=SECTIONHEADER.size()) - SECTIONHEADER.pack_into( - objdata, COFFHEADER.size() + i * SECTIONHEADER.size(), header) + SECTIONHEADER.pack_into(objdata, + COFFHEADER.size() + i * SECTIONHEADER.size(), + header) for i in range(debug_section_index + 1, len(section_headers)): header = section_headers[i] shift = SECTIONHEADER.size() + debug_size @@ -244,8 +239,9 @@ def MakeDeterministic(objdata): header = Subtract(header, PointerToRelocations=shift) if header.NumberOfLineNumbers: header = Subtract(header, PointerToLineNumbers=shift) - SECTIONHEADER.pack_into( - objdata, COFFHEADER.size() + i * SECTIONHEADER.size(), header) + SECTIONHEADER.pack_into(objdata, + COFFHEADER.size() + i * SECTIONHEADER.size(), + header) del objdata[debug_offset:debug_offset + debug_size] @@ -257,9 +253,9 @@ def MakeDeterministic(objdata): NumberOfSymbols=2) COFFHEADER.pack_into(objdata, 0, coff_header) - del objdata[ - COFFHEADER.size() + debug_section_index * SECTIONHEADER.size(): - COFFHEADER.size() + (debug_section_index + 1) * SECTIONHEADER.size()] + del objdata[COFFHEADER.size() + + debug_section_index * SECTIONHEADER.size():COFFHEADER.size() + + (debug_section_index + 1) * SECTIONHEADER.size()] # All done! if sys.version_info.major == 2: diff --git a/build/toolchain/win/rc/linux64/rc.sha1 b/build/toolchain/win/rc/linux64/rc.sha1 index ad14ca46a99b..0d132a05f349 100644 --- a/build/toolchain/win/rc/linux64/rc.sha1 +++ b/build/toolchain/win/rc/linux64/rc.sha1 @@ -1 +1 @@ -2d0c766039264dc2514d005a42f074af4838a446 \ No newline at end of file +1ca25446f5eed4151dc9b43c2a9182433e8f83c0 \ No newline at end of file diff --git a/build/toolchain/win/rc/mac/rc.sha1 b/build/toolchain/win/rc/mac/rc.sha1 index dbd6302a359c..cd9deb9d63cd 100644 --- a/build/toolchain/win/rc/mac/rc.sha1 +++ b/build/toolchain/win/rc/mac/rc.sha1 @@ -1 +1 @@ -4c25c3bcb6608109bb52028d008835895cf72629 \ No newline at end of file +95e7af85589f1102667fc07efe488fd426c483e8 \ No newline at end of file diff --git a/build/toolchain/win/rc/rc.py b/build/toolchain/win/rc/rc.py index 2ab41225fba8..a650506a1e0a 100755 --- a/build/toolchain/win/rc/rc.py +++ b/build/toolchain/win/rc/rc.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2017 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
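A note on the Struct helper that the ml.py hunks above keep reflowing: it wraps the struct module and returns namedtuples, which is why fields read as coff_header.TimeDateStamp or header.PointerToRawData. A plausible sketch of that wrapper's shape (assumed details; the real class is defined near the top of ml.py):

import collections
import struct

class Struct(object):
  """Thin struct-module wrapper that unpacks records into namedtuples."""

  def __init__(self, name, *args):
    # args interleaves struct format characters and field names, e.g.
    # Struct('REL', 'I', 'VirtualAddress', 'I', 'SymbolTableIndex', ...).
    self.fmt = '<' + ''.join(args[0::2])
    self.type = collections.namedtuple(name, args[1::2])

  def unpack_from(self, buffer, offset=0):
    return self.type(*struct.unpack_from(self.fmt, buffer, offset))

  def pack_into(self, buffer, offset, data):
    return struct.pack_into(self.fmt, buffer, offset, *data)

  def size(self):
    return struct.calcsize(self.fmt)

With that shape, definitions like COFFHEADER and SECTIONHEADER above are just interleaved format/field lists, and the calls the hunks rely on (unpack_from, pack_into, size) behave as shown.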
@@ -16,7 +16,6 @@ /nologo Ignored (rc.py doesn't print a logo by default). /showIncludes Print referenced header and resource files.""" -from __future__ import print_function from collections import namedtuple import codecs import os diff --git a/build/toolchain/win/rc/upload_rc_binaries.sh b/build/toolchain/win/rc/upload_rc_binaries.sh index ec4df4cbced9..790b36a6e213 100755 --- a/build/toolchain/win/rc/upload_rc_binaries.sh +++ b/build/toolchain/win/rc/upload_rc_binaries.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. set -eu diff --git a/build/toolchain/win/rc/win/rc.exe.sha1 b/build/toolchain/win/rc/win/rc.exe.sha1 index 3fdbfc0c2069..30c641e383dd 100644 --- a/build/toolchain/win/rc/win/rc.exe.sha1 +++ b/build/toolchain/win/rc/win/rc.exe.sha1 @@ -1 +1 @@ -ba51d69039ffb88310b72b6568efa9f0de148f8f \ No newline at end of file +7d3a485bb5bae0cf3c6b8af95d21f36aa7d02832 \ No newline at end of file diff --git a/build/toolchain/win/setup_toolchain.py b/build/toolchain/win/setup_toolchain.py index c1d2fa2ce0aa..d2f5798ce653 100644 --- a/build/toolchain/win/setup_toolchain.py +++ b/build/toolchain/win/setup_toolchain.py @@ -1,4 +1,4 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # @@ -10,7 +10,6 @@ # win tool. The script assumes that the root build directory is the current dir # and the files will be written to the current directory. -from __future__ import print_function import errno import json @@ -23,6 +22,8 @@ import gn_helpers SCRIPT_DIR = os.path.dirname(__file__) +SDK_VERSION = '10.0.22621.0' + def _ExtractImportantEnvironment(output_of_set): """Extracts environment variables required for the toolchain to run from @@ -58,6 +59,15 @@ def _ExtractImportantEnvironment(output_of_set): # path. Add the path to this python here so that if it's not in the # path when ninja is run later, python will still be found. setting = os.path.dirname(sys.executable) + os.pathsep + setting + if envvar in ['include', 'lib']: + # Make sure that the include and lib paths point to directories that + # exist. This ensures a (relatively) clear error message if the + # required SDK is not installed. + for part in setting.split(';'): + if not os.path.exists(part) and len(part) != 0: + raise Exception( + 'Path "%s" from environment variable "%s" does not exist. ' + 'Make sure the necessary SDK is installed.' % (part, envvar)) env[var.upper()] = setting break if sys.platform in ('win32', 'cygwin'): @@ -151,9 +161,12 @@ def _LoadToolchainEnv(cpu, toolchain_root, sdk_dir, target_store): # doesn't seem to cause problems. if 'VSINSTALLDIR' in os.environ: del os.environ['VSINSTALLDIR'] - del os.environ['INCLUDE'] - del os.environ['LIB'] - del os.environ['LIBPATH'] + if 'INCLUDE' in os.environ: + del os.environ['INCLUDE'] + if 'LIB' in os.environ: + del os.environ['LIB'] + if 'LIBPATH' in os.environ: + del os.environ['LIBPATH'] other_path = os.path.normpath(os.path.join( os.environ['GYP_MSVS_OVERRIDE_PATH'], 'VC/Auxiliary/Build/vcvarsall.bat')) @@ -172,7 +185,7 @@ def _LoadToolchainEnv(cpu, toolchain_root, sdk_dir, target_store): # Explicitly specifying the SDK version to build with to avoid accidentally # building with a new and untested SDK. 
This should stay in sync with the # packaged toolchain in build/vs_toolchain.py. - args.append('10.0.19041.0') + args.append(SDK_VERSION) variables = _LoadEnvFromBat(args) return _ExtractImportantEnvironment(variables) @@ -240,9 +253,6 @@ def main(): cpus = ('x86', 'x64', 'arm', 'arm64') assert target_cpu in cpus vc_bin_dir = '' - vc_lib_path = '' - vc_lib_atlmfc_path = '' - vc_lib_um_path = '' include = '' lib = '' @@ -265,10 +275,6 @@ def q(s): # Quote s if it contains spaces or other weird characters. env['PATH'] = runtime_dirs + os.pathsep + env['PATH'] vc_bin_dir = FindFileInEnvList(env, 'PATH', os.pathsep, 'cl.exe') - vc_lib_path = FindFileInEnvList(env, 'LIB', ';', 'msvcrt.lib') - vc_lib_atlmfc_path = FindFileInEnvList( - env, 'LIB', ';', 'atls.lib', optional=True) - vc_lib_um_path = FindFileInEnvList(env, 'LIB', ';', 'user32.lib') # The separator for INCLUDE here must match the one used in # _LoadToolchainEnv() above. @@ -284,7 +290,7 @@ def q(s): # Quote s if it contains spaces or other weird characters. if (environment_block_name != ''): env_block = _FormatAsEnvironmentBlock(env) - with open(environment_block_name, 'w') as f: + with open(environment_block_name, 'w', encoding='utf8') as f: f.write(env_block) print('vc_bin_dir = ' + gn_helpers.ToGNString(vc_bin_dir)) @@ -296,15 +302,14 @@ def q(s): # Quote s if it contains spaces or other weird characters. gn_helpers.ToGNString(q('/winsysroot' + relflag(toolchain_root)))) else: print('include_flags_imsvc = ' + gn_helpers.ToGNString(include_imsvc)) - print('vc_lib_path = ' + gn_helpers.ToGNString(vc_lib_path)) - # Possible atlmfc library path gets introduced in the future for store thus - # output result if a result exists. - if (vc_lib_atlmfc_path != ''): - print('vc_lib_atlmfc_path = ' + gn_helpers.ToGNString(vc_lib_atlmfc_path)) - print('vc_lib_um_path = ' + gn_helpers.ToGNString(vc_lib_um_path)) print('paths = ' + gn_helpers.ToGNString(env['PATH'])) assert libpath_flags print('libpath_flags = ' + gn_helpers.ToGNString(libpath_flags)) + if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and win_sdk_path: + print('libpath_lldlink_flags = ' + + gn_helpers.ToGNString(q('/winsysroot:' + relflag(toolchain_root)))) + else: + print('libpath_lldlink_flags = ' + gn_helpers.ToGNString(libpath_flags)) if __name__ == '__main__': diff --git a/build/toolchain/win/tool_wrapper.py b/build/toolchain/win/tool_wrapper.py index 9327369181be..47bbfe2a09d9 100644 --- a/build/toolchain/win/tool_wrapper.py +++ b/build/toolchain/win/tool_wrapper.py @@ -1,4 +1,4 @@ -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -8,7 +8,6 @@ is used to set up calls to tools used by the build that need wrappers. """ -from __future__ import print_function import os import re @@ -141,10 +140,11 @@ def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args): # Read output one line at a time as it shows up to avoid OOM failures when # GBs of output is produced. 
 for line in link.stdout:
-      if (not line.startswith(b'   Creating library ')
-          and not line.startswith(b'Generating code')
-          and not line.startswith(b'Finished generating code')):
-        print(line)
+      line = line.decode('utf8')
+      if (not line.startswith('   Creating library ')
+          and not line.startswith('Generating code')
+          and not line.startswith('Finished generating code')):
+        print(line.rstrip())
     return link.wait()
 
   def ExecAsmWrapper(self, arch, *args):
@@ -155,7 +155,8 @@ def ExecAsmWrapper(self, arch, *args):
     # separator, convert it to '\\' when running on Windows.
     args = list(args)  # *args is a tuple by default, which is read-only
     args[0] = args[0].replace('/', '\\')
-    popen = subprocess.Popen(args, shell=True, env=env,
+    # See comment in ExecLinkWrapper() for why shell=False on non-win.
+    popen = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
                              stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     out, _ = popen.communicate()
     for line in out.decode('utf8').splitlines():
diff --git a/build/toolchain/win/toolchain.gni b/build/toolchain/win/toolchain.gni
new file mode 100644
index 000000000000..968a4a20e131
--- /dev/null
+++ b/build/toolchain/win/toolchain.gni
@@ -0,0 +1,691 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/rust.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build/toolchain/win/win_toolchain_data.gni")
+
+assert(is_win, "Should only be running on Windows")
+
+# This tool is used as a wrapper for various commands below.
+_tool_wrapper_path =
+    rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
+
+if (host_os == "win") {
+  _exe = ".exe"
+} else {
+  _exe = ""
+}
+
+_clang_bin_path = rebase_path("$clang_base_path/bin", root_build_dir)
+
+# Makes a single MSVC toolchain. Callers should normally instead invoke
+# "msvc_toolchain" which might make an additional toolchain available
+# without sanitizers if required.
+#
+# Parameters:
+#   environment: File name of environment file.
+#
+# You would also define a toolchain_args variable with at least these set:
+#   current_cpu: current_cpu to pass as a build arg
+#   current_os: current_os to pass as a build arg
+template("single_msvc_toolchain") {
+  toolchain(target_name) {
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    assert(defined(invoker.toolchain_args))
+    toolchain_args = {
+      forward_variables_from(invoker.toolchain_args, "*")
+
+      # This value needs to be passed through unchanged.
+      host_toolchain = host_toolchain
+
+      # This value needs to be passed through unchanged.
+      host_toolchain_no_sanitizers = host_toolchain_no_sanitizers
+    }
+
+    if (defined(toolchain_args.is_clang)) {
+      toolchain_is_clang = toolchain_args.is_clang
+    } else {
+      toolchain_is_clang = is_clang
+    }
+
+    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+    # toolchain args, use those values; otherwise, default to the global one.
+    # This works because the only reasonable override that toolchains might
+    # supply for these values is to force-disable them.
+ if (defined(toolchain_args.use_remoteexec)) { + toolchain_uses_remoteexec = toolchain_args.use_remoteexec + } else { + toolchain_uses_remoteexec = use_remoteexec + } + if (defined(toolchain_args.use_goma)) { + toolchain_uses_goma = toolchain_args.use_goma + } else { + toolchain_uses_goma = use_goma + } + if (defined(toolchain_args.cc_wrapper)) { + toolchain_cc_wrapper = toolchain_args.cc_wrapper + } else { + toolchain_cc_wrapper = cc_wrapper + } + assert(!(toolchain_uses_remoteexec && toolchain_uses_goma), + "Goma and re-client can't be used together.") + assert(!(toolchain_cc_wrapper != "" && toolchain_uses_remoteexec), + "re-client and cc_wrapper can't be used together.") + assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma), + "Goma and cc_wrapper can't be used together.") + + if (toolchain_uses_remoteexec) { + if (toolchain_is_clang) { + cl_prefix = "${rbe_bin_dir}/rewrapper -cfg=${rbe_cc_cfg_file} -exec_root=${rbe_exec_root} " + } else { + cl_prefix = "" + } + } else if (toolchain_uses_goma) { + cl_prefix = "${goma_dir}/gomacc${_exe} " + } else if (toolchain_cc_wrapper != "" && toolchain_is_clang) { + cl_prefix = toolchain_cc_wrapper + " " + } else { + cl_prefix = "" + } + + cl = "${cl_prefix}${invoker.cl}" + if (host_os == "win") { + # Flip the slashes so that copy/paste of the command works. + cl = string_replace(cl, "/", "\\") + } + + # Make these apply to all tools below. + lib_switch = "" + lib_dir_switch = "/LIBPATH:" + + # Object files go in this directory. + object_subdir = "{{target_out_dir}}/{{label_name}}" + + env = invoker.environment + + if (use_lld) { + # lld-link includes a replacement for lib.exe that can produce thin + # archives and understands bitcode (for lto builds). + link = "${_clang_bin_path}/lld-link${_exe}" + if (host_os == "win") { + # Flip the slashes so that copy/paste of the commands works. + link = string_replace(link, "/", "\\") + } + lib = "$link /lib" + if (host_os != "win") { + # See comment adding --rsp-quoting to $cl above for more information. + link = "$link --rsp-quoting=posix" + } + } else { + lib = "lib.exe" + link = "link.exe" + } + + # If possible, pass system includes as flags to the compiler. When that's + # not possible, load a full environment file (containing %INCLUDE% and + # %PATH%) -- e.g. 32-bit MSVS builds require %PATH% to be set and just + # passing in a list of include directories isn't enough. + if (defined(invoker.sys_include_flags)) { + env_wrapper = "" + sys_include_flags = + "${invoker.sys_include_flags} " # Note trailing space. + } else { + # clang-cl doesn't need this env hoop, so omit it there. + assert(!toolchain_is_clang) + env_wrapper = "ninja -t msvc -e $env -- " # Note trailing space. + sys_include_flags = "" + } + + if (host_os != "win" || (use_lld && defined(invoker.sys_lib_flags))) { + linker_wrapper = "" + sys_lib_flags = "${invoker.sys_lib_flags} " # Note trailing space. + } else { + # link.exe must be run under a wrapper to set up the environment + # (it needs %LIB% set to find libraries), and to work around its bugs. 
+      # Note trailing space:
+      linker_wrapper =
+          "\"$python_path\" $_tool_wrapper_path link-wrapper $env False "
+      sys_lib_flags = ""
+    }
+
+    if (defined(toolchain_args.use_clang_coverage)) {
+      toolchain_use_clang_coverage = toolchain_args.use_clang_coverage
+    } else {
+      toolchain_use_clang_coverage = use_clang_coverage
+    }
+
+    if (toolchain_use_clang_coverage) {
+      assert(toolchain_is_clang,
+             "use_clang_coverage should only be used with Clang")
+      if (defined(toolchain_args.coverage_instrumentation_input_file)) {
+        toolchain_coverage_instrumentation_input_file =
+            toolchain_args.coverage_instrumentation_input_file
+      } else {
+        toolchain_coverage_instrumentation_input_file =
+            coverage_instrumentation_input_file
+      }
+
+      coverage_wrapper =
+          rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
+                      root_build_dir)
+      coverage_wrapper = coverage_wrapper + " --target-os=" + target_os
+      if (toolchain_coverage_instrumentation_input_file != "") {
+        coverage_wrapper =
+            coverage_wrapper + " --files-to-instrument=" +
+            rebase_path(toolchain_coverage_instrumentation_input_file,
+                        root_build_dir)
+      }
+      coverage_wrapper = "\"$python_path\" " + coverage_wrapper + " "
+    } else {
+      coverage_wrapper = ""
+    }
+
+    # Disabled with cc_wrapper because of
+    # https://github.com/mozilla/sccache/issues/1013
+    if (toolchain_is_clang && toolchain_cc_wrapper == "") {
+      # This flag omits system includes from /showIncludes output, to reduce
+      # the amount of data to parse and store in .ninja_deps. We do this on
+      # non-Windows too, and already make sure rebuilds after winsdk/libc++/
+      # clang header updates happen via changing command line flags.
+      show_includes = "/showIncludes:user"
+    } else {
+      show_includes = "/showIncludes"
+    }
+
+    tool("cc") {
+      precompiled_header_type = "msvc"
+      pdbname = "{{target_out_dir}}/{{label_name}}_c.pdb"
+
+      # Label names may have spaces in them so the pdbname must be quoted. The
+      # source and output don't need to be quoted because GN knows they're a
+      # full file name and will quote automatically when necessary.
+      depsformat = "msvc"
+      description = "CC {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
+
+      # Note that the code coverage wrapper script assumes that {{source}}
+      # comes immediately after /c.
+      command = "$coverage_wrapper$env_wrapper$cl /c {{source}} /nologo $show_includes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} /Fo{{output}} /Fd\"$pdbname\""
+    }
+
+    tool("cxx") {
+      precompiled_header_type = "msvc"
+
+      # The PDB name needs to be different between C and C++ compiled files.
+      pdbname = "{{target_out_dir}}/{{label_name}}_cc.pdb"
+
+      # See comment in CC tool about quoting.
+      depsformat = "msvc"
+      description = "CXX {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
+
+      # Note that the code coverage wrapper script assumes that {{source}}
+      # comes immediately after /c.
+ command = "$coverage_wrapper$env_wrapper$cl /c {{source}} /Fo{{output}} /nologo $show_includes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} /Fd\"$pdbname\"" + } + + tool("rc") { + command = "\"$python_path\" $_tool_wrapper_path rc-wrapper $env rc.exe /nologo $sys_include_flags{{defines}} {{include_dirs}} /fo{{output}} {{source}}" + depsformat = "msvc" + outputs = [ "$object_subdir/{{source_name_part}}.res" ] + description = "RC {{output}}" + } + + tool("asm") { + is_msvc_assembler = true + + if (toolchain_args.current_cpu == "arm64") { + if (toolchain_is_clang) { + ml = "${cl_prefix}${_clang_bin_path}/clang-cl${_exe} --target=arm64-windows" + if (host_os == "win") { + # Flip the slashes so that copy/paste of the command works. + ml = string_replace(ml, "/", "\\") + } + ml += " -c -o{{output}}" + is_msvc_assembler = false + } else { + # Only affects Arm builds with is_clang = false, implemented for + # building V8 for Windows on Arm systems with the MSVC toolchain. + ml = "armasm64.exe" + } + } else { + if (toolchain_is_clang && !disable_llvm_ml) { + prefix = rebase_path("$clang_base_path/bin", root_build_dir) + ml = "$prefix/llvm-ml${_exe}" + if (toolchain_args.current_cpu == "x64") { + ml += " -m64" + } else { + ml += " -m32" + } + } else { + if (toolchain_args.current_cpu == "x64") { + ml = "ml64.exe" + } else { + ml = "ml.exe" + } + } + } + + if (is_msvc_assembler) { + ml += " /nologo /Fo{{output}}" + + # Suppress final-stage linking on x64/x86 builds. (Armasm64 does not + # require /c because it doesn't support linking.) + if (toolchain_args.current_cpu != "arm64") { + ml += " /c" + } + if (use_lld && (!toolchain_is_clang || disable_llvm_ml)) { + # Wrap ml(64).exe with a script that makes its output deterministic. + # It's lld only because the script zaps obj Timestamp which + # link.exe /incremental looks at. + ml_py = rebase_path("//build/toolchain/win/ml.py", root_build_dir) + ml = "\"$python_path\" $ml_py $ml" + } + } + if (toolchain_args.current_cpu != "arm64" || toolchain_is_clang) { + # TODO(thakis): Stop using asm-wrapper when using clang. + command = "\"$python_path\" $_tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} {{source}}" + } else { + # armasm64.exe does not support definitions passed via the command + # line. (Fortunately, they're not needed for compiling the V8 + # snapshot, which is the only time this assembler is required.) 
+ command = "\"$python_path\" $_tool_wrapper_path asm-wrapper $env $ml {{include_dirs}} {{asmflags}} {{source}}" + } + + description = "ASM {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.obj" ] + } + + if (toolchain_has_rust) { + rustc_wrapper = rebase_path("//build/rust/rustc_wrapper.py") + rustc = rebase_path("${rust_sysroot}/bin/rustc", root_build_dir) + rust_sysroot_relative_to_out = rebase_path(rust_sysroot, root_out_dir) + rustc_windows_args = " -Clinker=$link $rustc_common_args" + + tool("rust_staticlib") { + libname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$libname.rsp" + depfile = "$libname.d" + + default_output_extension = ".lib" + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST(STATICLIB) {{output}}" + outputs = [ libname ] + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $libname LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_rlib") { + # We must always prefix with `lib` even if the library already starts + # with that prefix or else our stdlib is unable to find libc.rlib (or + # actually liblibc.rlib). + rlibname = + "{{output_dir}}/lib{{target_output_name}}{{output_extension}}" + depfile = "$rlibname.d" + + # Do not use rsp files in this (common) case because they occupy the + # ninja main thread, and {{rlibs}} have shorter command lines than + # fully linked targets. + + default_output_extension = ".rlib" + + # This is prefixed unconditionally in `rlibname`. + # output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST {{output}}" + outputs = [ rlibname ] + + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $rlibname {{rustdeps}} {{externs}} LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_bin") { + exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + pdbname = "$exename.pdb" + rspfile = "$exename.rsp" + depfile = "$exename.d" + pool = "//build/toolchain:link_pool($default_toolchain)" + + default_output_extension = ".exe" + default_output_dir = "{{root_out_dir}}" + description = "RUST(BIN) {{output}}" + outputs = [ + # The first entry here is used for dependency tracking. + exename, + pdbname, + ] + runtime_outputs = outputs + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $exename LDFLAGS {{ldflags}} $sys_lib_flags /PDB:$pdbname RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_cdylib") { + # E.g. "foo.dll": + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + libname = "$dllname.lib" # e.g. foo.dll.lib + pdbname = "$dllname.pdb" + rspfile = "$dllname.rsp" + depfile = "$dllname.d" + pool = "//build/toolchain:link_pool($default_toolchain)" + + default_output_extension = ".dll" + default_output_dir = "{{root_out_dir}}" + description = "RUST(CDYLIB) {{output}}" + outputs = [ + # The first entry here is used for dependency tracking. Dylibs are + # linked into other targets and that linking must be done through + # the .lib file, not the .dll file. So the .lib file is the primary + # output here. 
+          libname,
+          dllname,
+          pdbname,
+        ]
+        runtime_outputs = [
+          dllname,
+          pdbname,
+        ]
+
+        rspfile_content = "{{rustdeps}} {{externs}}"
+        command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} $sys_lib_flags /PDB:$pdbname /IMPLIB:$libname RUSTENV {{rustenv}}"
+        rust_sysroot = rust_sysroot_relative_to_out
+
+        # Since the above command only updates the .lib file when it changes,
+        # ask Ninja to check if the timestamp actually changed to know if
+        # downstream dependencies should be recompiled.
+        restat = true
+      }
+
+      tool("rust_macro") {
+        # E.g. "foo.dll":
+        dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+        pdbname = "$dllname.pdb"
+        rspfile = "$dllname.rsp"
+        depfile = "$dllname.d"
+        pool = "//build/toolchain:link_pool($default_toolchain)"
+
+        default_output_extension = ".dll"
+        default_output_dir = "{{root_out_dir}}"
+        description = "RUST(MACRO) {{output}}"
+        outputs = [
+          # The first entry here is used for dependency tracking. Proc macros
+          # are consumed as dlls directly, loaded at runtime, so the dll is
+          # the primary output here. If we make a .lib file the primary
+          # output, we end up trying to load the .lib file as a procmacro,
+          # which fails.
+          #
+          # Since depending on a macro target for linking would fail (it
+          # would try to link the primary .dll target) we omit the .lib here
+          # entirely.
+          dllname,
+          pdbname,
+        ]
+        runtime_outputs = outputs
+
+        rspfile_content = "{{rustdeps}} {{externs}}"
+        command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} $sys_lib_flags /PDB:$pdbname RUSTENV {{rustenv}}"
+        rust_sysroot = rust_sysroot_relative_to_out
+
+        # As above, ask Ninja to check whether the outputs' timestamps
+        # actually changed to know if downstream dependencies should be
+        # recompiled.
+        restat = true
+      }
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      command =
+          "$linker_wrapper$lib /OUT:{{output}} /nologo {{arflags}} @$rspfile"
+      description = "LIB {{output}}"
+      outputs = [
+        # Ignore {{output_extension}} and always use .lib; there's no reason
+        # to allow targets to override this extension on Windows.
+        "{{output_dir}}/{{target_output_name}}.lib",
+      ]
+      default_output_extension = ".lib"
+      default_output_dir = "{{target_out_dir}}"
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}}"
+    }
+
+    tool("solink") {
+      # E.g. "foo.dll":
+      dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      libname = "${dllname}.lib"  # e.g. foo.dll.lib
+      pdbname = "${dllname}.pdb"
+      rspfile = "${dllname}.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      command = "$linker_wrapper$link /OUT:$dllname /nologo ${sys_lib_flags}/IMPLIB:$libname /DLL /PDB:$pdbname @$rspfile"
+
+      default_output_extension = ".dll"
+      default_output_dir = "{{root_out_dir}}"
+      description = "LINK(DLL) {{output}}"
+      outputs = [
+        dllname,
+        libname,
+        pdbname,
+      ]
+      link_output = libname
+      depend_output = libname
+      runtime_outputs = [
+        dllname,
+        pdbname,
+      ]
+
+      # Since the above command only updates the .lib file when it changes,
+      # ask Ninja to check if the timestamp actually changed to know if
+      # downstream dependencies should be recompiled.
+ restat = true + + # The use of inputs_newline is to work around a fixed per-line buffer + # size in the linker. + rspfile_content = + "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}} {{rlibs}}" + } + + tool("solink_module") { + # E.g. "foo.dll": + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + pdbname = "${dllname}.pdb" + rspfile = "${dllname}.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + command = "$linker_wrapper$link /OUT:$dllname /nologo ${sys_lib_flags}/DLL /PDB:$pdbname @$rspfile" + + default_output_extension = ".dll" + default_output_dir = "{{root_out_dir}}" + description = "LINK_MODULE(DLL) {{output}}" + outputs = [ + dllname, + pdbname, + ] + runtime_outputs = outputs + + # The use of inputs_newline is to work around a fixed per-line buffer + # size in the linker. + rspfile_content = + "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}} {{rlibs}}" + } + + tool("link") { + exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + pdbname = "$exename.pdb" + rspfile = "$exename.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + command = "$linker_wrapper$link /OUT:$exename /nologo ${sys_lib_flags} /PDB:$pdbname @$rspfile" + + default_output_extension = ".exe" + default_output_dir = "{{root_out_dir}}" + description = "LINK {{output}}" + outputs = [ + exename, + pdbname, + ] + runtime_outputs = outputs + + # The use of inputs_newline is to work around a fixed per-line buffer + # size in the linker. + rspfile_content = + "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}} {{rlibs}}" + } + + # These two are really entirely generic, but have to be repeated in + # each toolchain because GN doesn't allow a template to be used here. + # See //build/toolchain/toolchain.gni for details. + tool("stamp") { + command = stamp_command + description = stamp_description + pool = "//build/toolchain:action_pool($default_toolchain)" + } + tool("copy") { + command = copy_command + description = copy_description + pool = "//build/toolchain:action_pool($default_toolchain)" + } + + tool("action") { + pool = "//build/toolchain:action_pool($default_toolchain)" + } + } +} + +# Makes a single MSVC toolchain, or possibly two if we +# need an additional toolchain without sanitizers enabled. +template("msvc_toolchain") { + single_msvc_toolchain(target_name) { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + forward_variables_from(invoker, + "*", + [ + "visibility", + "test_only", + ]) + + # No need to forward visibility and test_only as they apply to targets not + # toolchains, but presubmit checks require that we explicitly exclude them + } + + if (using_sanitizer) { + # Make an additional toolchain with no sanitizers. + single_msvc_toolchain("${target_name}_no_sanitizers") { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + forward_variables_from(invoker, + "*", + [ + "toolchain_args", + "visibility", + "test_only", + ]) + toolchain_args = { + # Populate toolchain args from the invoker. 
+ forward_variables_from(invoker.toolchain_args, "*") + toolchain_disables_sanitizers = true + } + } + } +} + +template("win_toolchains") { + assert(defined(invoker.toolchain_arch)) + toolchain_arch = invoker.toolchain_arch + + if (toolchain_arch == "x86") { + win_toolchain_data = win_toolchain_data_x86 + } else if (toolchain_arch == "x64") { + win_toolchain_data = win_toolchain_data_x64 + } else if (toolchain_arch == "arm64") { + win_toolchain_data = win_toolchain_data_arm64 + } else { + error("Unsupported toolchain_arch, add it to win_toolchain_data.gni") + } + + # The toolchain using MSVC only makes sense when not doing cross builds. + # Chromium exclusively uses the win_clang_ toolchain below, but V8 and + # WebRTC still use this MSVC toolchain in some cases. + if (host_os == "win") { + if (defined(invoker.cl_toolchain_prefix)) { + cl_toolchain_prefix = invoker.cl_toolchain_prefix + } else { + cl_toolchain_prefix = "" + } + msvc_toolchain(cl_toolchain_prefix + target_name) { + environment = "environment." + toolchain_arch + cl = "\"${win_toolchain_data.vc_bin_dir}/cl.exe\"" + + toolchain_args = { + if (defined(invoker.toolchain_args)) { + forward_variables_from(invoker.toolchain_args, "*") + } + is_clang = false + use_clang_coverage = false + current_os = "win" + current_cpu = toolchain_arch + } + } + } + + if (defined(invoker.clang_toolchain_prefix)) { + clang_toolchain_prefix = invoker.clang_toolchain_prefix + } else { + clang_toolchain_prefix = "win_clang_" + } + msvc_toolchain(clang_toolchain_prefix + target_name) { + environment = "environment." + toolchain_arch + cl = "${_clang_bin_path}/clang-cl${_exe}" + _clang_lib_dir = + rebase_path("$clang_base_path/lib/clang/$clang_version/lib/windows", + root_build_dir) + if (host_os == "win") { + # And to match the other -libpath flags. + _clang_lib_dir = string_replace(_clang_lib_dir, "/", "\\") + } + + sys_include_flags = "${win_toolchain_data.include_flags_imsvc}" + if (use_lld) { + sys_lib_flags = + "-libpath:$_clang_lib_dir ${win_toolchain_data.libpath_lldlink_flags}" + + # TODO(thakis): Remove once crbug.com/1300005 is fixed + assert(toolchain_arch == "x64" || toolchain_arch == "x86" || + toolchain_arch == "arm" || toolchain_arch == "arm64", + "Only supports x64, x86, arm and arm64 CPUs") + if (toolchain_arch == "x64") { + sys_lib_flags += " /MACHINE:X64" + } else if (toolchain_arch == "x86") { + sys_lib_flags += " /MACHINE:X86" + } else if (toolchain_arch == "arm") { + sys_lib_flags += " /MACHINE:ARM" + } else if (toolchain_arch == "arm64") { + sys_lib_flags += " /MACHINE:ARM64" + } + } + + toolchain_args = { + if (defined(invoker.toolchain_args)) { + forward_variables_from(invoker.toolchain_args, "*") + } + is_clang = true + current_os = "win" + current_cpu = toolchain_arch + } + } +} diff --git a/build/toolchain/win/win_toolchain_data.gni b/build/toolchain/win/win_toolchain_data.gni new file mode 100644 index 000000000000..505d0ce5049f --- /dev/null +++ b/build/toolchain/win/win_toolchain_data.gni @@ -0,0 +1,43 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/config/win/visual_studio_version.gni") + +declare_args() { + win_toolchain_data_x86 = + exec_script("//build/toolchain/win/setup_toolchain.py", + [ + visual_studio_path, + windows_sdk_path, + visual_studio_runtime_dirs, + "win", + "x86", + "environment.x86", + ], + "scope") + + win_toolchain_data_x64 = + exec_script("//build/toolchain/win/setup_toolchain.py", + [ + visual_studio_path, + windows_sdk_path, + visual_studio_runtime_dirs, + "win", + "x64", + "environment.x64", + ], + "scope") + + win_toolchain_data_arm64 = + exec_script("//build/toolchain/win/setup_toolchain.py", + [ + visual_studio_path, + windows_sdk_path, + visual_studio_runtime_dirs, + "win", + "arm64", + "environment.arm64", + ], + "scope") +} diff --git a/build/toolchain/wrapper_utils.py b/build/toolchain/wrapper_utils.py index 5949a3727c79..f01e159fd69c 100644 --- a/build/toolchain/wrapper_utils.py +++ b/build/toolchain/wrapper_utils.py @@ -1,4 +1,4 @@ -# Copyright (c) 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -13,6 +13,8 @@ import sys import threading +import whole_archive + _BAT_PREFIX = 'cmd /c call ' @@ -70,6 +72,10 @@ def RunLinkWithOptionalMapFile(command, env=None, map_file=None): elif map_file: command.append('-Wl,-Map,' + map_file) + # We want to link rlibs as --whole-archive if they are part of a unit test + # target. This is determined by switch `-LinkWrapper,add-whole-archive`. + command = whole_archive.wrap_with_whole_archive(command) + result = subprocess.call(command, env=env) if tmp_map_path and result == 0: diff --git a/build/toolchain/zos/BUILD.gn b/build/toolchain/zos/BUILD.gn new file mode 100644 index 000000000000..3af5f8033d82 --- /dev/null +++ b/build/toolchain/zos/BUILD.gn @@ -0,0 +1,174 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is based on gcc_toolchain.gni and customized for z/OS. + +import("//build/toolchain/gcc_toolchain.gni") + +toolchain("s390x") { + cc = "xlclang" + cxx = "xlclang++" + asm = "xlclang" + ar = "ar" + ld = cxx + + toolchain_args = { + current_cpu = "s390x" + current_os = "zos" + } + + rebuild_string = "" + default_shlib_extension = ".so" + default_shlib_subdir = "" + extra_cflags = "" + extra_cppflags = "" + extra_cxxflags = "" + extra_asmflags = "" + extra_ldflags = "" + + # These library switches can apply to all tools below. + lib_switch = "-l" + lib_dir_switch = "-L" + + # Object files go in this directory. + object_subdir = "{{target_out_dir}}/{{label_name}}" + + tool("cc") { + depfile = "{{output}}.d" + command = "$cc -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cflags} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "CC {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("cxx") { + depfile = "{{output}}.d" + command = "$cxx -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "CXX {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("asm") { + # Just use the C compiler to compile assembly. 
+    depfile = "{{output}}.d"
+    command = "$asm -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}"
+    depsformat = "gcc"
+    description = "ASM {{output}}"
+    outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+  }
+
+  tool("alink") {
+    command = "$ar {{arflags}} -r -c -s {{output}} {{inputs}}"
+
+    # Remove the output file first so that ar doesn't try to modify the
+    # existing file.
+    command = "rm -f {{output}} && $command"
+
+    # Almost all targets build with //build/config/compiler:thin_archive which
+    # adds -T to arflags.
+    description = "AR {{output}}"
+    outputs = [ "{{output_dir}}/{{target_output_name}}{{output_extension}}" ]
+
+    # Static libraries go in the target out directory by default so we can
+    # generate different targets with the same name and not have them collide.
+    default_output_dir = "{{target_out_dir}}"
+    default_output_extension = ".a"
+    output_prefix = "lib"
+  }
+
+  tool("solink") {
+    soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
+    sofile = "{{output_dir}}/$soname"  # Possibly including toolchain dir.
+    xfile = "{{output_dir}}/{{target_output_name}}.x"
+    rspfile = sofile + ".rsp"
+
+    # These variables are not built into GN but are helpers used to build the
+    # final command: linking produces the .so along with its .x side deck
+    # file, which is what dependent targets link against.
+    link_command = "$ld -Wl,DLL {{ldflags}}${extra_ldflags} -o \"$sofile\" `cat $rspfile`"
+
+    solink_wrapper =
+        rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
+    command = "$python_path \"$solink_wrapper\" --output=\"$sofile\" -- $link_command"
+
+    rspfile_content = "{{inputs}} {{solibs}} {{libs}}"
+
+    description = "SOLINK $sofile"
+
+    # Use this for {{output_extension}} expansions unless a target manually
+    # overrides it (in which case {{output_extension}} will be what the target
+    # specifies).
+    default_output_extension = default_shlib_extension
+
+    default_output_dir = "{{root_out_dir}}${default_shlib_subdir}"
+
+    output_prefix = "lib"
+
+    # Ask Ninja to check whether the output timestamps actually changed after
+    # the command runs, so downstream dependencies are only recompiled when
+    # the outputs really changed.
+    restat = true
+
+    # Tell GN about the output files. Dependent targets link against the .x
+    # side deck file, which is also used for dependency management.
+    outputs = [ xfile ]
+    outputs += [ sofile ]
+
+    link_output = xfile
+    depend_output = xfile
+  }
+
+  tool("solink_module") {
+    soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
+ sofile = "{{output_dir}}/$soname" + xfile = "{{output_dir}}/{{target_output_name}}.x" + + rspfile = sofile + ".rsp" + + command = "$ld {{ldflags}}${extra_ldflags} -o \"$sofile\" `cat $rspfile`" + + rspfile_content = "{{inputs}} {{solibs}} {{libs}}" + + description = "SOLINK_MODULE $sofile" + + default_output_dir = "{{root_out_dir}}${default_shlib_subdir}" + + output_prefix = "lib" + outputs = [ xfile ] + outputs += [ sofile ] + } + + tool("link") { + exename = "{{target_output_name}}{{output_extension}}" + outfile = "{{output_dir}}/$exename" + rspfile = "$outfile.rsp" + + default_output_dir = "{{root_out_dir}}" + + link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$outfile\" `cat $rspfile` {{solibs}} {{libs}}" + + link_wrapper = + rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir) + + command = "$python_path \"$link_wrapper\" --output=\"$outfile\" -- $link_command" + + description = "LINK $outfile" + rspfile_content = "{{inputs}}" + outputs = [ outfile ] + } + + # These two are really entirely generic, but have to be repeated in + # each toolchain because GN doesn't allow a template to be used here. + # See //build/toolchain/toolchain.gni for details. + tool("stamp") { + command = stamp_command + description = stamp_description + } + tool("copy") { + command = copy_command + description = copy_description + } +} diff --git a/build/tree_truth.sh b/build/tree_truth.sh index 617092dc8a43..00150f0740ff 100755 --- a/build/tree_truth.sh +++ b/build/tree_truth.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # diff --git a/build/update-linux-sandbox.sh b/build/update-linux-sandbox.sh index d24cf2602d7a..1d2442483c5d 100755 --- a/build/update-linux-sandbox.sh +++ b/build/update-linux-sandbox.sh @@ -1,6 +1,6 @@ #!/bin/sh -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/util/BUILD.gn b/build/util/BUILD.gn index 2ba66a4672df..a96d326776a9 100644 --- a/build/util/BUILD.gn +++ b/build/util/BUILD.gn @@ -1,19 +1,19 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
 import("//build/util/lastchange.gni")
 
-action("webkit_version") {
+action("chromium_git_revision") {
   script = "version.py"
 
-  template_file = "webkit_version.h.in"
+  template_file = "chromium_git_revision.h.in"
   inputs = [
     lastchange_file,
     template_file,
   ]
 
-  output_file = "$target_gen_dir/webkit_version.h"
+  output_file = "$target_gen_dir/chromium_git_revision.h"
   outputs = [ output_file ]
 
   args = [
@@ -28,18 +28,10 @@ action("webkit_version") {
   ]
 }
 
-action("chrome_version_json") {
-  script = "version.py"
-  _chrome_version_path = "//chrome/VERSION"
-  inputs = [ _chrome_version_path ]
-  _output_file = "$root_gen_dir/CHROME_VERSION.json"
-  outputs = [ _output_file ]
-  args = [
-    "--file",
-    rebase_path(_chrome_version_path, root_build_dir),
-    "--template",
-    "{\"full-quoted\": \"\\\"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\\\"\"}",
-    "--output",
-    rebase_path(_output_file, root_build_dir),
+group("test_results") {
+  data = [
+    "//.vpython3",
+    "//build/util/lib/__init__.py",
+    "//build/util/lib/results/",
   ]
 }
diff --git a/build/util/PRESUBMIT.py b/build/util/PRESUBMIT.py
index 1e0fc8c80262..88fd9bf5b323 100644
--- a/build/util/PRESUBMIT.py
+++ b/build/util/PRESUBMIT.py
@@ -1,4 +1,4 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -6,6 +6,9 @@
 """Presubmit for build/util"""
 
+USE_PYTHON3 = True
+
+
 def _GetFilesToSkip(input_api):
   files_to_skip = []
   affected_files = input_api.change.AffectedFiles()
@@ -37,7 +40,10 @@ def _GetPythonUnitTests(input_api, output_api):
       output_api,
       input_api.PresubmitLocalPath(),
       files_to_check=['.*_test\\.py$'],
-      files_to_skip=files_to_skip)
+      files_to_skip=files_to_skip,
+      run_on_python2=False,
+      run_on_python3=True,
+      skip_shebang_check=True)
 
 
 def CommonChecks(input_api, output_api):
diff --git a/build/util/action_remote.py b/build/util/action_remote.py
new file mode 100755
index 000000000000..ea2e132442db
--- /dev/null
+++ b/build/util/action_remote.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wrapper script to run an action remotely through rewrapper with gn.
+
+Also includes Chromium-specific input processors that don't make sense as
+built-in reclient input processors."""
+
+import argparse
+import json
+import os
+import subprocess
+import sys
+from enum import Enum
+
+_THIS_DIR = os.path.realpath(os.path.dirname(__file__))
+_SRC_DIR = os.path.dirname(os.path.dirname(_THIS_DIR))
+_MOJOM_DIR = os.path.join(_SRC_DIR, 'mojo', 'public', 'tools', 'mojom')
+
+
+class CustomProcessor(Enum):
+  mojom_parser = 'mojom_parser'
+
+  def __str__(self):
+    return self.value
+
+
+def _process_build_metadata_json(bm_file, input_roots, output_root, re_outputs,
+                                 processed_inputs):
+  """Recursively find mojom_parser inputs from a build_metadata file."""
+  # Import the Mojo-specific dep here so non-Mojo remote actions don't need it.
+  if _MOJOM_DIR not in sys.path:
+    sys.path.insert(0, _MOJOM_DIR)
+  from mojom_parser import RebaseAbsolutePath
+
+  if bm_file in processed_inputs:
+    return
+
+  processed_inputs.add(bm_file)
+
+  bm_dir = os.path.dirname(bm_file)
+
+  with open(bm_file) as f:
+    bm = json.load(f)
+
+  # All sources and corresponding module files are inputs.
+  for s in bm["sources"]:
+    src = os.path.normpath(os.path.join(bm_dir, s))
+    if src not in processed_inputs and os.path.exists(src):
+      processed_inputs.add(src)
+    src_module = os.path.join(
+        output_root,
+        RebaseAbsolutePath(os.path.abspath(src), input_roots) + "-module")
+    if src_module in re_outputs:
+      continue
+    if src_module not in processed_inputs and os.path.exists(src_module):
+      processed_inputs.add(src_module)
+
+  # Recurse into build_metadata deps.
+  for d in bm["deps"]:
+    dep = os.path.normpath(os.path.join(bm_dir, d))
+    _process_build_metadata_json(dep, input_roots, output_root, re_outputs,
+                                 processed_inputs)
+
+
+def _get_mojom_parser_inputs(exec_root, output_files, extra_args):
+  """Get mojom inputs by walking generated build_metadata files.
+
+  This requires less complexity and disk I/O than parsing the mojom files
+  themselves to discover all of their transitive imports.
+
+  Start from the root build_metadata file passed to mojom_parser's
+  --check-imports flag.
+  """
+  argparser = argparse.ArgumentParser()
+  argparser.add_argument('--check-imports', dest='check_imports', required=True)
+  argparser.add_argument('--output-root', dest='output_root', required=True)
+  argparser.add_argument('--input-root',
+                         default=[],
+                         action='append',
+                         dest='input_root_paths')
+  mojom_parser_args, _ = argparser.parse_known_args(args=extra_args)
+
+  input_roots = list(map(os.path.abspath, mojom_parser_args.input_root_paths))
+  output_root = os.path.abspath(mojom_parser_args.output_root)
+  processed_inputs = set()
+  _process_build_metadata_json(mojom_parser_args.check_imports, input_roots,
+                               output_root, output_files, processed_inputs)
+
+  # Rebase paths onto rewrapper exec root.
+  return map(lambda dep: os.path.normpath(os.path.relpath(dep, exec_root)),
+             processed_inputs)
+
+
+def main():
+  # Set up argparser with some rewrapper flags.
+  argparser = argparse.ArgumentParser(description='rewrapper executor for gn',
+                                      allow_abbrev=False)
+  argparser.add_argument('--custom_processor',
+                         type=CustomProcessor,
+                         choices=list(CustomProcessor))
+  argparser.add_argument('rewrapper_path')
+  argparser.add_argument('--input_list_paths')
+  argparser.add_argument('--output_list_paths')
+  argparser.add_argument('--exec_root')
+  parsed_args, extra_args = argparser.parse_known_args()
+
+  # This script expects to be calling rewrapper.
+  args = [parsed_args.rewrapper_path]
+
+  # Get the output files list.
+  output_files = set()
+  with open(parsed_args.output_list_paths, 'r') as file:
+    for line in file:
+      output_files.add(line.rstrip('\n'))
+
+  # Scan for and add explicit inputs for rewrapper if necessary.
+  # These should be in a new input list paths file, as using --inputs can fail
+  # if the list is extremely large.
+  if parsed_args.custom_processor == CustomProcessor.mojom_parser:
+    root, ext = os.path.splitext(parsed_args.input_list_paths)
+    extra_inputs = _get_mojom_parser_inputs(parsed_args.exec_root, output_files,
+                                            extra_args)
+    extra_input_list_path = '%s__extra%s' % (root, ext)
+    with open(extra_input_list_path, 'w') as file:
+      with open(parsed_args.input_list_paths, 'r') as inputs:
+        file.write(inputs.read())
+      file.write("\n".join(extra_inputs))
+    args += ["--input_list_paths=%s" % extra_input_list_path]
+  else:
+    args += ["--input_list_paths=%s" % parsed_args.input_list_paths]
+
+  # Filter out --custom_processor= which is a flag for this script,
+  # and filter out --input_list_paths= because we replace it above.
+  # Pass on the rest of the args to rewrapper.
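+  # e.g. (hypothetical argv) for:
+  #   action_remote.py rewrapper --custom_processor=mojom_parser
+  #       --input_list_paths=gen/foo.rsp --exec_root=/src -- mojom_parser ...
+  # rewrapper still receives --exec_root=/src and the wrapped command; only
+  # the two flags named above are stripped or rewritten.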
+ args_rest = filter(lambda arg: '--custom_processor=' not in arg, sys.argv[2:]) + args += filter(lambda arg: '--input_list_paths=' not in arg, args_rest) + + # Run rewrapper. + proc = subprocess.run(args) + return proc.returncode + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/util/android_chrome_version.py b/build/util/android_chrome_version.py old mode 100644 new mode 100755 index c06bb38b74b7..151081af209e --- a/build/util/android_chrome_version.py +++ b/build/util/android_chrome_version.py @@ -1,4 +1,5 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Different build variants of Chrome for Android have different version codes. @@ -23,6 +24,13 @@ WEBVIEW_STABLE, WEBVIEW_BETA, WEBVIEW_DEV are all used for standalone webview, whereas the others are used for various chrome APKs. +TRICHROME_BETA is used for TrichromeChrome, TrichromeWebView, and +TrichromeLibrary when these are compiled to use the stable package name. Similar +to how WEBVIEW_STABLE/WEBVIEW_BETA work, this allows users to opt into the open +Beta Track for the stable package. When Trichrome is configured to use a +distinct package name for the Beta package, the version code will use TRICHROME +instead of TRICHROME_BETA. + Note that a package digit of '3' for Webview is reserved for Trichrome Webview. The same versionCode is used for both Trichrome Chrome and Trichrome Webview. @@ -37,23 +45,30 @@ """ +import argparse +from collections import namedtuple + # Package name version bits. _PACKAGE_NAMES = { 'CHROME': 0, 'CHROME_MODERN': 10, 'MONOCHROME': 20, 'TRICHROME': 30, + 'TRICHROME_BETA': 40, + 'TRICHROME_AUTO': 50, 'WEBVIEW_STABLE': 0, 'WEBVIEW_BETA': 10, 'WEBVIEW_DEV': 20, } +""" "Next" builds get +500 on their patch number. -""" "Next" builds get +5 on their package version code digit. - -We choose 5 because it won't conflict with values in _PACKAGE_NAMES. +This ensures that they are considered "newer" than any non-next build of the +same branch number; this is a workaround for Android requiring a total ordering +of versions when we only really have a partial ordering. This assumes that the +actual patch number will never reach 500, which has never even come close in +the past. """ -_NEXT_BUILD_VERSION_CODE_DIFF = 50 - +_NEXT_BUILD_VERSION_CODE_DIFF = 50000 """List of version numbers to be created for each build configuration. Tuple format: @@ -62,7 +77,7 @@ Here, (supported ABIs) is referring to the combination of browser ABI and webview library ABI present in a particular APK. For example, 64_32 implies a 64-bit browser with an extra 32-bit Webview library. See also -_ABIS_TO_BIT_MASK. +_ABIS_TO_DIGIT_MASK. 
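+
+For example, the entry ('TRICHROME_64_32', 'TRICHROME', '64_32') produces a
+TRICHROME_64_32_VERSION_CODE combining the TRICHROME package digit with the
+64_32 abi digit of the target architecture.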
""" _APKS = { '32': [ @@ -70,6 +85,7 @@ ('CHROME_MODERN', 'CHROME_MODERN', '32'), ('MONOCHROME', 'MONOCHROME', '32'), ('TRICHROME', 'TRICHROME', '32'), + ('TRICHROME_BETA', 'TRICHROME_BETA', '32'), ('WEBVIEW_STABLE', 'WEBVIEW_STABLE', '32'), ('WEBVIEW_BETA', 'WEBVIEW_BETA', '32'), ('WEBVIEW_DEV', 'WEBVIEW_DEV', '32'), @@ -86,13 +102,24 @@ ('TRICHROME_32', 'TRICHROME', '32'), ('TRICHROME_32_64', 'TRICHROME', '32_64'), ('TRICHROME_64_32', 'TRICHROME', '64_32'), + ('TRICHROME_64_32_HIGH', 'TRICHROME', '64_32_high'), ('TRICHROME_64', 'TRICHROME', '64'), + ('TRICHROME_AUTO_64_32', 'TRICHROME_AUTO', '64_32'), + ('TRICHROME_BETA', 'TRICHROME_BETA', '32_64'), + ('TRICHROME_32_BETA', 'TRICHROME_BETA', '32'), + ('TRICHROME_32_64_BETA', 'TRICHROME_BETA', '32_64'), + ('TRICHROME_64_32_BETA', 'TRICHROME_BETA', '64_32'), + ('TRICHROME_64_32_HIGH_BETA', 'TRICHROME_BETA', '64_32_high'), + ('TRICHROME_64_BETA', 'TRICHROME_BETA', '64'), ('WEBVIEW_STABLE', 'WEBVIEW_STABLE', '32_64'), ('WEBVIEW_BETA', 'WEBVIEW_BETA', '32_64'), ('WEBVIEW_DEV', 'WEBVIEW_DEV', '32_64'), ('WEBVIEW_32_STABLE', 'WEBVIEW_STABLE', '32'), ('WEBVIEW_32_BETA', 'WEBVIEW_BETA', '32'), ('WEBVIEW_32_DEV', 'WEBVIEW_DEV', '32'), + ('WEBVIEW_64_STABLE', 'WEBVIEW_STABLE', '64'), + ('WEBVIEW_64_BETA', 'WEBVIEW_BETA', '64'), + ('WEBVIEW_64_DEV', 'WEBVIEW_DEV', '64'), ] } @@ -102,7 +129,6 @@ 'arm64': ('arm', '64'), 'x86': ('intel', '32'), 'x64': ('intel', '64'), - 'mipsel': ('mipsel', '32'), } # Expose the available choices to other scripts. @@ -145,26 +171,126 @@ version on a 64-bit device, otherwise it won't work properly. So, the 64-bit version needs to be a higher versionCode, as otherwise a 64-bit device would prefer the 32-bit version that does not include any 64-bit code, and fail. -- The relative order of mips isn't important, but it needs to be a *distinct* - value to the other architectures because all builds need unique version codes. """ -_ABIS_TO_BIT_MASK = { - 'arm': { - '32': 0, - '32_64': 3, - '64_32': 4, - '64': 5, - }, - 'intel': { - '32': 1, - '32_64': 6, - '64_32': 7, - '64': 8, - }, - 'mipsel': { - '32': 2, + + +def _GetAbisToDigitMask(build_number): + """Return the correct digit mask based on build number. + + Updated from build 5750: Some intel devices advertise support for arm, + so arm codes must be lower than x86 codes to prevent providing an + arm-optimized build to intel devices. + + Cherry-picked to 5735 to support releasing the new + version code schema earlier. + + Returns: + A dictionary of architecture mapped to bitness + mapped to version code suffix. + """ + + if build_number < 5750 and build_number != 5735: + return { + 'arm': { + '32': 0, + '32_64': 3, + '64_32': 4, + '64': 5, + '64_32_high': 9, + }, + 'intel': { + '32': 1, + '32_64': 6, + '64_32': 7, + '64': 8, + }, } -} + return { + 'arm': { + '32': 0, + '32_64': 1, + '64_32': 2, + '64_32_high': 3, + '64': 4, + }, + 'intel': { + '32': 6, + '32_64': 7, + '64_32': 8, + '64': 9, + }, + } + + +VersionCodeComponents = namedtuple('VersionCodeComponents', [ + 'build_number', + 'patch_number', + 'package_name', + 'abi', + 'is_next_build', +]) + + +def TranslateVersionCode(version_code, is_webview=False): + """Translates a version code to its component parts. 
+ + Returns: + A 5-tuple (VersionCodeComponents) with the form: + - Build number - integer + - Patch number - integer + - Package name - string + - ABI - string : if the build is 32_64 or 64_32 or 64, that is just + appended to 'arm' or 'x86' with an underscore + - Whether the build is a "next" build - boolean + + So, for build 100.0.5678.99, built for Monochrome on arm 64_32, not a next + build, you should get: + 5678, 99, 'MONOCHROME', 'arm_64_32', False + """ + if len(version_code) == 9: + build_number = int(version_code[:4]) + else: + # At one branch per day, we'll hit 5 digits in the year 2035. + build_number = int(version_code[:5]) + + is_next_build = False + patch_number_plus_extra = int(version_code[-5:]) + if patch_number_plus_extra >= _NEXT_BUILD_VERSION_CODE_DIFF: + is_next_build = True + patch_number_plus_extra -= _NEXT_BUILD_VERSION_CODE_DIFF + patch_number = patch_number_plus_extra // 100 + + # From branch 3992 the name and abi bits in the version code are swapped. + if build_number >= 3992: + abi_digit = int(version_code[-1]) + package_digit = int(version_code[-2]) + else: + abi_digit = int(version_code[-2]) + package_digit = int(version_code[-1]) + + # Before branch 4844 we added 5 to the package digit to indicate a 'next' + # build. + if build_number < 4844 and package_digit >= 5: + is_next_build = True + package_digit -= 5 + + for package, number in _PACKAGE_NAMES.items(): + if number == package_digit * 10: + if is_webview == ('WEBVIEW' in package): + package_name = package + break + + for arch, bitness_to_number in _GetAbisToDigitMask(build_number).items(): + for bitness, number in bitness_to_number.items(): + if abi_digit == number: + abi = arch if arch != 'intel' else 'x86' + if bitness != '32': + abi += '_' + bitness + break + + return VersionCodeComponents(build_number, patch_number, package_name, abi, + is_next_build) + def GenerateVersionCodes(version_values, arch, is_next_build): """Build dict of version codes for the specified build architecture. Eg: @@ -200,12 +326,29 @@ def GenerateVersionCodes(version_values, arch, is_next_build): version_codes = {} + abi_to_digit_mask = _GetAbisToDigitMask(int(version_values['BUILD'])) for apk, package, abis in _APKS[bitness]: - abi_bits = _ABIS_TO_BIT_MASK[mfg][abis] - package_bits = _PACKAGE_NAMES[package] + if abis == '64_32_high' and arch != 'arm64': + continue + abi_part = abi_to_digit_mask[mfg][abis] + package_part = _PACKAGE_NAMES[package] version_code_name = apk + '_VERSION_CODE' - version_code_val = base_version_code + abi_bits + package_bits + version_code_val = base_version_code + package_part + abi_part version_codes[version_code_name] = str(version_code_val) return version_codes + + +def main(): + parser = argparse.ArgumentParser(description='Parses version codes.') + parser.add_argument('version_code', help='Version code (e.g. 529700010).') + parser.add_argument('--webview', + action='store_true', + help='Whether this is a webview version code.') + args = parser.parse_args() + print(TranslateVersionCode(args.version_code, is_webview=args.webview)) + + +if __name__ == '__main__': + main() diff --git a/build/util/android_chrome_version_test.py b/build/util/android_chrome_version_test.py index eed77488ccda..4ebd007a24de 100644 --- a/build/util/android_chrome_version_test.py +++ b/build/util/android_chrome_version_test.py @@ -1,90 +1,106 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. 
+# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import unittest from android_chrome_version import GenerateVersionCodes +from android_chrome_version import TranslateVersionCode + +EXAMPLE_VERSION_VALUES = { + 'MAJOR': '99', + 'MINOR': '0', + 'BUILD': '4844', + 'PATCH': '0', +} + +EXAMPLE_GROUPED_VERSION_VALUES = { + 'MAJOR': '99', + 'MINOR': '0', + 'BUILD': '5750', + 'PATCH': '0', +} class _VersionTest(unittest.TestCase): """Unittests for the android_chrome_version module. """ - EXAMPLE_VERSION_VALUES = { - 'MAJOR': '74', - 'MINOR': '0', - 'BUILD': '3720', - 'PATCH': '0', - } - def testGenerateVersionCodesAndroidChrome(self): """Assert it gives correct values for standard/example inputs""" - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) chrome_version_code = output['CHROME_VERSION_CODE'] - self.assertEqual(chrome_version_code, '372000000') + self.assertEqual(chrome_version_code, '484400000') def testGenerateVersionCodesAndroidChromeModern(self): """Assert it gives correct values for standard/example inputs""" - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) chrome_modern_version_code = output['CHROME_MODERN_VERSION_CODE'] - self.assertEqual(chrome_modern_version_code, '372000010') + self.assertEqual(chrome_modern_version_code, '484400010') def testGenerateVersionCodesAndroidMonochrome(self): """Assert it gives correct values for standard/example inputs""" - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) monochrome_version_code = output['MONOCHROME_VERSION_CODE'] - self.assertEqual(monochrome_version_code, '372000020') + self.assertEqual(monochrome_version_code, '484400020') def testGenerateVersionCodesAndroidTrichrome(self): """Assert it gives correct values for standard/example inputs""" - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) trichrome_version_code = output['TRICHROME_VERSION_CODE'] - self.assertEqual(trichrome_version_code, '372000030') + self.assertEqual(trichrome_version_code, '484400030') def testGenerateVersionCodesAndroidWebviewStable(self): """Assert it gives correct values for standard/example inputs""" - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] - self.assertEqual(webview_stable_version_code, '372000000') + self.assertEqual(webview_stable_version_code, '484400000') def testGenerateVersionCodesAndroidWebviewBeta(self): """Assert it gives correct values for standard/example inputs""" - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] - self.assertEqual(webview_beta_version_code, '372000010') + 
self.assertEqual(webview_beta_version_code, '484400010') def testGenerateVersionCodesAndroidWebviewDev(self): """Assert it gives correct values for standard/example inputs""" - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE'] - self.assertEqual(webview_dev_version_code, '372000020') + self.assertEqual(webview_dev_version_code, '484400020') def testGenerateVersionCodesAndroidNextBuild(self): """Assert it handles "next" builds correctly""" - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=True) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=True) # Get just a sample of values chrome_version_code = output['CHROME_VERSION_CODE'] @@ -92,10 +108,10 @@ def testGenerateVersionCodesAndroidNextBuild(self): webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] - self.assertEqual(chrome_version_code, '372000050') - self.assertEqual(monochrome_version_code, '372000070') - self.assertEqual(webview_stable_version_code, '372000050') - self.assertEqual(webview_beta_version_code, '372000060') + self.assertEqual(chrome_version_code, '484450000') + self.assertEqual(monochrome_version_code, '484450020') + self.assertEqual(webview_stable_version_code, '484450000') + self.assertEqual(webview_beta_version_code, '484450010') def testGenerateVersionCodesAndroidArchArm(self): """Assert it handles different architectures correctly. @@ -105,11 +121,12 @@ def testGenerateVersionCodesAndroidArchArm(self): See docs in android_chrome_version._ABIS_TO_BIT_MASK for reasoning. """ - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) arch_chrome_version_code = output['CHROME_VERSION_CODE'] - self.assertEqual(arch_chrome_version_code, '372000000') + self.assertEqual(arch_chrome_version_code, '484400000') def testGenerateVersionCodesAndroidArchX86(self): """Assert it handles different architectures correctly. @@ -119,13 +136,66 @@ def testGenerateVersionCodesAndroidArchX86(self): See docstring on android_chrome_version._ABIS_TO_BIT_MASK for reasoning. """ - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='x86', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='x86', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '484400001') + + def testGenerateVersionCodesAndroidArchArm64(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm64', + is_next_build=False) arch_chrome_version_code = output['CHROME_VERSION_CODE'] - self.assertEqual(arch_chrome_version_code, '372000001') + self.assertEqual(arch_chrome_version_code, '484400005') + + def testGenerateVersionCodesAndroidArchArm64Variants(self): + """Assert it handles 64-bit-specific additional version codes correctly. + + Some additional version codes are generated for 64-bit architectures. 
+ See docstring on android_chrome_version.ARCH64_APK_VARIANTS for more info. + """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm64', + is_next_build=False) + arch_monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + arch_monochrome_32_version_code = output['MONOCHROME_32_VERSION_CODE'] + arch_monochrome_32_64_version_code = output['MONOCHROME_32_64_VERSION_CODE'] + arch_monochrome_64_32_version_code = output['MONOCHROME_64_32_VERSION_CODE'] + arch_monochrome_64_version_code = output['MONOCHROME_64_VERSION_CODE'] + arch_trichrome_version_code = output['TRICHROME_VERSION_CODE'] + arch_trichrome_32_version_code = output['TRICHROME_32_VERSION_CODE'] + arch_trichrome_32_64_version_code = output['TRICHROME_32_64_VERSION_CODE'] + arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE'] + arch_trichrome_64_32_high_version_code = output[ + 'TRICHROME_64_32_HIGH_VERSION_CODE'] + arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE'] + arch_trichrome_auto_64_32_version_code = output[ + 'TRICHROME_AUTO_64_32_VERSION_CODE'] + + self.assertEqual(arch_monochrome_32_version_code, '484400020') + self.assertEqual(arch_monochrome_32_64_version_code, '484400023') + self.assertEqual(arch_monochrome_version_code, '484400023') + self.assertEqual(arch_monochrome_64_32_version_code, '484400024') + self.assertEqual(arch_monochrome_64_version_code, '484400025') + self.assertEqual(arch_trichrome_32_version_code, '484400030') + self.assertEqual(arch_trichrome_32_64_version_code, '484400033') + self.assertEqual(arch_trichrome_version_code, '484400033') + self.assertEqual(arch_trichrome_64_32_version_code, '484400034') + self.assertEqual(arch_trichrome_64_32_high_version_code, '484400039') + self.assertEqual(arch_trichrome_64_version_code, '484400035') + self.assertEqual(arch_trichrome_auto_64_32_version_code, '484400054') - def testGenerateVersionCodesAndroidArchMips(self): + def testGenerateVersionCodesAndroidArchX64(self): """Assert it handles different architectures correctly. Version codes for different builds need to be distinct and maintain a @@ -133,11 +203,269 @@ def testGenerateVersionCodesAndroidArchMips(self): See docstring on android_chrome_version._ABIS_TO_BIT_MASK for reasoning. """ - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='mipsel', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='x64', + is_next_build=False) arch_chrome_version_code = output['CHROME_VERSION_CODE'] - self.assertEqual(arch_chrome_version_code, '372000002') + self.assertEqual(arch_chrome_version_code, '484400008') + + def testGenerateVersionCodesAndroidArchX64Variants(self): + """Assert it handles 64-bit-specific additional version codes correctly. + + Some additional version codes are generated for 64-bit architectures. + See docstring on android_chrome_version.ARCH64_APK_VARIANTS for more info. 
+    """
+    output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES,
+                                  arch='x64',
+                                  is_next_build=False)
+    arch_monochrome_32_version_code = output['MONOCHROME_32_VERSION_CODE']
+    arch_monochrome_32_64_version_code = output['MONOCHROME_32_64_VERSION_CODE']
+    arch_monochrome_version_code = output['MONOCHROME_VERSION_CODE']
+    arch_monochrome_64_32_version_code = output['MONOCHROME_64_32_VERSION_CODE']
+    arch_monochrome_64_version_code = output['MONOCHROME_64_VERSION_CODE']
+    arch_trichrome_32_version_code = output['TRICHROME_32_VERSION_CODE']
+    arch_trichrome_32_64_version_code = output['TRICHROME_32_64_VERSION_CODE']
+    arch_trichrome_version_code = output['TRICHROME_VERSION_CODE']
+    arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE']
+    arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE']
+    arch_trichrome_auto_64_32_version_code = output[
+        'TRICHROME_AUTO_64_32_VERSION_CODE']
+
+    self.assertEqual(arch_monochrome_32_version_code, '484400021')
+    self.assertEqual(arch_monochrome_32_64_version_code, '484400026')
+    self.assertEqual(arch_monochrome_version_code, '484400026')
+    self.assertEqual(arch_monochrome_64_32_version_code, '484400027')
+    self.assertEqual(arch_monochrome_64_version_code, '484400028')
+    self.assertEqual(arch_trichrome_32_version_code, '484400031')
+    self.assertEqual(arch_trichrome_32_64_version_code, '484400036')
+    self.assertEqual(arch_trichrome_version_code, '484400036')
+    self.assertEqual(arch_trichrome_64_32_version_code, '484400037')
+    self.assertEqual(arch_trichrome_64_version_code, '484400038')
+    self.assertEqual(arch_trichrome_auto_64_32_version_code, '484400057')
+
+  def testGenerateVersionCodesAndroidArchOrderArm(self):
+    """Assert it handles different architectures correctly.
+
+    Version codes for different builds need to be distinct and maintain a
+    certain ordering.
+    See docstring on android_chrome_version._ABIS_TO_BIT_MASK for
+    reasoning.
+
+    Test arm-related values.
+    """
+    arm_output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES,
+                                      arch='arm',
+                                      is_next_build=False)
+    arm64_output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES,
+                                        arch='arm64',
+                                        is_next_build=False)
+
+    arm_chrome_version_code = arm_output['CHROME_VERSION_CODE']
+    arm64_chrome_version_code = arm64_output['CHROME_VERSION_CODE']
+
+    self.assertLess(arm_chrome_version_code, arm64_chrome_version_code)
+
+  def testGenerateVersionCodesAndroidArchOrderX86(self):
+    """Assert it handles different architectures correctly.
+
+    Version codes for different builds need to be distinct and maintain a
+    certain ordering.
+    See docstring on android_chrome_version._ABIS_TO_BIT_MASK for
+    reasoning.
+
+    Test x86-related values.
+    """
+    x86_output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES,
+                                      arch='x86',
+                                      is_next_build=False)
+    x64_output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES,
+                                      arch='x64',
+                                      is_next_build=False)
+
+    x86_chrome_version_code = x86_output['CHROME_VERSION_CODE']
+    x64_chrome_version_code = x64_output['CHROME_VERSION_CODE']
+
+    self.assertLess(x86_chrome_version_code, x64_chrome_version_code)
+
+  def testGenerateVersionCodesAndroidWebviewChannelOrderBeta(self):
+    """Assert webview beta channel is higher than stable.
+
+    The channel-specific version codes for standalone webview need to follow
+    the order stable < beta < dev.
+
+    This ensures that if a user opts into the beta track, they will always
+    have the beta apk, including any finch experiments targeted at beta users,
+    even when beta and stable channels are otherwise on the same version.
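+    (For example, with EXAMPLE_VERSION_VALUES at 99.0.4844.0, stable is
+    484400000 and beta is 484400010.)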
+    """
+    output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES,
+                                  arch='arm',
+                                  is_next_build=False)
+
+    webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE']
+    webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE']
+
+    self.assertGreater(webview_beta_version_code, webview_stable_version_code)
+
+  def testGenerateVersionCodesAndroidWebviewChannelOrderDev(self):
+    """Assert webview dev channel is higher than beta.
+
+    The channel-specific version codes for standalone webview need to follow
+    the order stable < beta < dev.
+
+    This ensures that if a user opts into the dev track, they will always
+    have the dev apk, including any finch experiments targeted at dev users,
+    even when dev and beta channels are otherwise on the same version.
+    """
+    output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES,
+                                  arch='arm',
+                                  is_next_build=False)
+
+    webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE']
+    webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE']
+
+    self.assertGreater(webview_dev_version_code, webview_beta_version_code)
+
+  def testGenerateVersionCodesTrichromeChannelOrderBeta(self):
+    """Assert Trichrome beta channel is higher than stable.
+
+    When Trichrome channels are compiled to use the stable channel's package
+    name, their version codes need to follow the order stable < beta.
+
+    This ensures that if a user opts into the beta track, they will always
+    have the beta apk, including any finch experiments targeted at beta users,
+    even when beta and stable channels are otherwise on the same version.
+    """
+    output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES,
+                                  arch='arm',
+                                  is_next_build=False)
+
+    trichrome_stable_version_code = output['TRICHROME_VERSION_CODE']
+    trichrome_beta_version_code = output['TRICHROME_BETA_VERSION_CODE']
+
+    self.assertGreater(trichrome_beta_version_code,
+                       trichrome_stable_version_code)
+
+
+class _VersionGroupedTest(unittest.TestCase):
+  """Unittests for the android_chrome_version module (grouped).
+ """ + def testGenerateVersionCodesAndroidChrome(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(chrome_version_code, '575000000') + + def testGenerateVersionCodesAndroidChromeModern(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + chrome_modern_version_code = output['CHROME_MODERN_VERSION_CODE'] + + self.assertEqual(chrome_modern_version_code, '575000010') + + def testGenerateVersionCodesAndroidMonochrome(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + + self.assertEqual(monochrome_version_code, '575000020') + + def testGenerateVersionCodesAndroidTrichrome(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + trichrome_version_code = output['TRICHROME_VERSION_CODE'] + + self.assertEqual(trichrome_version_code, '575000030') + + def testGenerateVersionCodesAndroidWebviewStable(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] + + self.assertEqual(webview_stable_version_code, '575000000') + + def testGenerateVersionCodesAndroidWebviewBeta(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] + + self.assertEqual(webview_beta_version_code, '575000010') + + def testGenerateVersionCodesAndroidWebviewDev(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE'] + + self.assertEqual(webview_dev_version_code, '575000020') + + def testGenerateVersionCodesAndroidNextBuild(self): + """Assert it handles "next" builds correctly""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=True) + + # Get just a sample of values + chrome_version_code = output['CHROME_VERSION_CODE'] + monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] + webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] + + self.assertEqual(chrome_version_code, '575050000') + self.assertEqual(monochrome_version_code, '575050020') + self.assertEqual(webview_stable_version_code, '575050000') + self.assertEqual(webview_beta_version_code, '575050010') + + def testGenerateVersionCodesAndroidArchArm(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docs in android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. 
+ """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '575000000') + + def testGenerateVersionCodesAndroidArchX86(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='x86', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '575000006') def testGenerateVersionCodesAndroidArchArm64(self): """Assert it handles different architectures correctly. @@ -147,11 +475,12 @@ def testGenerateVersionCodesAndroidArchArm64(self): See docstring on android_chrome_version._ABIS_TO_BIT_MASK for reasoning. """ - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm64', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm64', + is_next_build=False) arch_chrome_version_code = output['CHROME_VERSION_CODE'] - self.assertEqual(arch_chrome_version_code, '372000005') + self.assertEqual(arch_chrome_version_code, '575000004') def testGenerateVersionCodesAndroidArchArm64Variants(self): """Assert it handles 64-bit-specific additional version codes correctly. @@ -159,8 +488,9 @@ def testGenerateVersionCodesAndroidArchArm64Variants(self): Some additional version codes are generated for 64-bit architectures. See docstring on android_chrome_version.ARCH64_APK_VARIANTS for more info. """ - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm64', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm64', + is_next_build=False) arch_monochrome_version_code = output['MONOCHROME_VERSION_CODE'] arch_monochrome_32_version_code = output['MONOCHROME_32_VERSION_CODE'] arch_monochrome_32_64_version_code = output['MONOCHROME_32_64_VERSION_CODE'] @@ -171,17 +501,20 @@ def testGenerateVersionCodesAndroidArchArm64Variants(self): arch_trichrome_32_64_version_code = output['TRICHROME_32_64_VERSION_CODE'] arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE'] arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE'] - - self.assertEqual(arch_monochrome_32_version_code, '372000020') - self.assertEqual(arch_monochrome_32_64_version_code, '372000023') - self.assertEqual(arch_monochrome_version_code, '372000023') - self.assertEqual(arch_monochrome_64_32_version_code, '372000024') - self.assertEqual(arch_monochrome_64_version_code, '372000025') - self.assertEqual(arch_trichrome_32_version_code, '372000030') - self.assertEqual(arch_trichrome_32_64_version_code, '372000033') - self.assertEqual(arch_trichrome_version_code, '372000033') - self.assertEqual(arch_trichrome_64_32_version_code, '372000034') - self.assertEqual(arch_trichrome_64_version_code, '372000035') + arch_trichrome_auto_64_32_version_code = output[ + 'TRICHROME_AUTO_64_32_VERSION_CODE'] + + self.assertEqual(arch_monochrome_32_version_code, '575000020') + self.assertEqual(arch_monochrome_32_64_version_code, '575000021') + self.assertEqual(arch_monochrome_version_code, '575000021') + self.assertEqual(arch_monochrome_64_32_version_code, '575000022') + self.assertEqual(arch_monochrome_64_version_code, '575000024') + 
self.assertEqual(arch_trichrome_32_version_code, '575000030') + self.assertEqual(arch_trichrome_32_64_version_code, '575000031') + self.assertEqual(arch_trichrome_version_code, '575000031') + self.assertEqual(arch_trichrome_64_32_version_code, '575000032') + self.assertEqual(arch_trichrome_64_version_code, '575000034') + self.assertEqual(arch_trichrome_auto_64_32_version_code, '575000052') def testGenerateVersionCodesAndroidArchX64(self): """Assert it handles different architectures correctly. @@ -191,11 +524,12 @@ def testGenerateVersionCodesAndroidArchX64(self): See docstring on android_chrome_version._ABIS_TO_BIT_MASK for reasoning. """ - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='x64', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='x64', + is_next_build=False) arch_chrome_version_code = output['CHROME_VERSION_CODE'] - self.assertEqual(arch_chrome_version_code, '372000008') + self.assertEqual(arch_chrome_version_code, '575000009') def testGenerateVersionCodesAndroidArchX64Variants(self): """Assert it handles 64-bit-specific additional version codes correctly. @@ -203,8 +537,9 @@ def testGenerateVersionCodesAndroidArchX64Variants(self): Some additional version codes are generated for 64-bit architectures. See docstring on android_chrome_version.ARCH64_APK_VARIANTS for more info. """ - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='x64', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='x64', + is_next_build=False) arch_monochrome_32_version_code = output['MONOCHROME_32_VERSION_CODE'] arch_monochrome_32_64_version_code = output['MONOCHROME_32_64_VERSION_CODE'] arch_monochrome_version_code = output['MONOCHROME_VERSION_CODE'] @@ -215,17 +550,20 @@ def testGenerateVersionCodesAndroidArchX64Variants(self): arch_trichrome_version_code = output['TRICHROME_VERSION_CODE'] arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE'] arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE'] - - self.assertEqual(arch_monochrome_32_version_code, '372000021') - self.assertEqual(arch_monochrome_32_64_version_code, '372000026') - self.assertEqual(arch_monochrome_version_code, '372000026') - self.assertEqual(arch_monochrome_64_32_version_code, '372000027') - self.assertEqual(arch_monochrome_64_version_code, '372000028') - self.assertEqual(arch_trichrome_32_version_code, '372000031') - self.assertEqual(arch_trichrome_32_64_version_code, '372000036') - self.assertEqual(arch_trichrome_version_code, '372000036') - self.assertEqual(arch_trichrome_64_32_version_code, '372000037') - self.assertEqual(arch_trichrome_64_version_code, '372000038') + arch_trichrome_auto_64_32_version_code = output[ + 'TRICHROME_AUTO_64_32_VERSION_CODE'] + + self.assertEqual(arch_monochrome_32_version_code, '575000026') + self.assertEqual(arch_monochrome_32_64_version_code, '575000027') + self.assertEqual(arch_monochrome_version_code, '575000027') + self.assertEqual(arch_monochrome_64_32_version_code, '575000028') + self.assertEqual(arch_monochrome_64_version_code, '575000029') + self.assertEqual(arch_trichrome_32_version_code, '575000036') + self.assertEqual(arch_trichrome_32_64_version_code, '575000037') + self.assertEqual(arch_trichrome_version_code, '575000037') + self.assertEqual(arch_trichrome_64_32_version_code, '575000038') + self.assertEqual(arch_trichrome_64_version_code, '575000039') + self.assertEqual(arch_trichrome_auto_64_32_version_code, '575000058') def 
testGenerateVersionCodesAndroidArchOrderArm(self): """Assert it handles different architectures correctly. @@ -237,10 +575,12 @@ def testGenerateVersionCodesAndroidArchOrderArm(self): Test arm-related values. """ - arm_output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) - arm64_output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm64', is_next_build=False) + arm_output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + arm64_output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm64', + is_next_build=False) arm_chrome_version_code = arm_output['CHROME_VERSION_CODE'] arm64_chrome_version_code = arm64_output['CHROME_VERSION_CODE'] @@ -257,10 +597,12 @@ def testGenerateVersionCodesAndroidArchOrderX86(self): Test x86-related values. """ - x86_output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='x86', is_next_build=False) - x64_output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='x64', is_next_build=False) + x86_output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='x86', + is_next_build=False) + x64_output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='x64', + is_next_build=False) x86_chrome_version_code = x86_output['CHROME_VERSION_CODE'] x64_chrome_version_code = x64_output['CHROME_VERSION_CODE'] @@ -277,8 +619,9 @@ def testGenerateVersionCodesAndroidWebviewChannelOrderBeta(self): beta apk, including any finch experiments targeted at beta users, even when beta and stable channels are otherwise on the same version. """ - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] @@ -295,14 +638,321 @@ def testGenerateVersionCodesAndroidWebviewChannelOrderDev(self): dev apk, including any finch experiments targeted at dev users, even when dev and beta channels are otherwise on the same version. """ - output = GenerateVersionCodes( - self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False) + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE'] self.assertGreater(webview_dev_version_code, webview_beta_version_code) + def testGenerateVersionCodesTrichromeChannelOrderBeta(self): + """Assert Trichrome beta channel is higher than stable. + + When Trichrome channels are compiled to use the stable channel's package + name, their version codes need to follow the order stable < beta. + + This allows that if a user opts into beta track, they will always have the + beta apk, including any finch experiments targeted at beta users, even when + beta and stable channels are otherwise on the same version. 
+ """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + trichrome_stable_version_code = output['TRICHROME_VERSION_CODE'] + trichrome_beta_version_code = output['TRICHROME_BETA_VERSION_CODE'] + + self.assertGreater(trichrome_beta_version_code, + trichrome_stable_version_code) + + +class _VersionCodeTest(unittest.TestCase): + def testGenerateThenTranslate(self): + """Assert it gives correct values for a version code that we generated.""" + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + version_code = output['MONOCHROME_VERSION_CODE'] + + build, patch, package, abi, is_next_build = TranslateVersionCode( + version_code) + self.assertEqual(build, int(EXAMPLE_VERSION_VALUES['BUILD'])) + self.assertEqual(patch, int(EXAMPLE_VERSION_VALUES['PATCH'])) + self.assertEqual(package, 'MONOCHROME') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, False) + + def testPre3992Translate(self): + """Test for an old build when the abi and apk bits were swapped.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '378100010') + self.assertEqual(build, 3781) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME') + self.assertEqual(abi, 'x86') + self.assertEqual(is_next_build, False) + + def testNextBuildTranslate(self): + """Test for a build with next.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499961210') + self.assertEqual(build, 4999) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, True) + + def testPre4844NextBuildTranslate(self): + """Test for a build with next when we added 50 to version code.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '400011260') + self.assertEqual(build, 4000) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, True) + + def testPre3992NextBuildTranslate(self): + """Test for a build with next when we added 5 to version code.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '300011206') + self.assertEqual(build, 3000) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, True) + + def testArm_64BuildTranslate(self): + """Test for a build with arm_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499911215') + self.assertEqual(build, 4999) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm_64') + self.assertEqual(is_next_build, False) + + def testArm_32_64Translate(self): + """Test for a build with arm_32_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900013') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm_32_64') + self.assertEqual(is_next_build, False) + + def testArm_64_32Translate(self): + """Test for a build with Trichrome and arm_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900034') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME') + self.assertEqual(abi, 'arm_64_32') + self.assertEqual(is_next_build, False) + + def testArm_Auto_64_32Translate(self): + """Test for an auto build with Trichrome and 
arm_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900054') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME_AUTO') + self.assertEqual(abi, 'arm_64_32') + self.assertEqual(is_next_build, False) + + def testArm_64_32HighTranslate(self): + """Test for a build with Trichrome and arm_64_32_high.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '534613739') + self.assertEqual(build, 5346) + self.assertEqual(patch, 137) + self.assertEqual(package, 'TRICHROME') + self.assertEqual(abi, 'arm_64_32_high') + self.assertEqual(is_next_build, False) + + def testX86_64Translate(self): + """Test for a build with x86_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900018') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_64') + self.assertEqual(is_next_build, False) + + def testX86_32_64Translate(self): + """Test for a build with x86_32_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900016') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_32_64') + self.assertEqual(is_next_build, False) + + def testX86_64_32Translate(self): + """Test for a build with x86_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900017') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_64_32') + self.assertEqual(is_next_build, False) + + def testX86_Auto_64_32Translate(self): + """Test for an auto build with x86_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900057') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME_AUTO') + self.assertEqual(abi, 'x86_64_32') + self.assertEqual(is_next_build, False) + + def testWebviewTranslate(self): + """Test for a build with Webview.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900000', is_webview=True) + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'WEBVIEW_STABLE') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, False) + + +class _VersionCodeGroupedTest(unittest.TestCase): + def testGenerateThenTranslate(self): + """Assert it gives correct values for a version code that we generated.""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + version_code = output['MONOCHROME_VERSION_CODE'] + + build, patch, package, abi, is_next_build = TranslateVersionCode( + version_code) + self.assertEqual(build, int(EXAMPLE_GROUPED_VERSION_VALUES['BUILD'])) + self.assertEqual(patch, int(EXAMPLE_GROUPED_VERSION_VALUES['PATCH'])) + self.assertEqual(package, 'MONOCHROME') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, False) + + def testNextBuildTranslate(self): + """Test for a build with next.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575061210') + self.assertEqual(build, 5750) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, True) + + def testArm_64BuildTranslate(self): + """Test for a build with arm_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + 
'575011214') + self.assertEqual(build, 5750) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm_64') + self.assertEqual(is_next_build, False) + + def testArm_32_64Translate(self): + """Test for a build with arm_32_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000011') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm_32_64') + self.assertEqual(is_next_build, False) + + def testArm_64_32Translate(self): + """Test for a build with Trichrome and arm_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000032') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME') + self.assertEqual(abi, 'arm_64_32') + self.assertEqual(is_next_build, False) + + def testArm_Auto_64_32Translate(self): + """Test for an auto build with Trichrome and arm_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000052') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME_AUTO') + self.assertEqual(abi, 'arm_64_32') + self.assertEqual(is_next_build, False) + + def testArm_64_32HighTranslate(self): + """Test for a build with Trichrome and arm_64_32_high.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '534613739') + self.assertEqual(build, 5346) + self.assertEqual(patch, 137) + self.assertEqual(package, 'TRICHROME') + self.assertEqual(abi, 'arm_64_32_high') + self.assertEqual(is_next_build, False) + + def testX86_64Translate(self): + """Test for a build with x86_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000019') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_64') + self.assertEqual(is_next_build, False) + + def testX86_32_64Translate(self): + """Test for a build with x86_32_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000017') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_32_64') + self.assertEqual(is_next_build, False) + + def testX86_64_32Translate(self): + """Test for a build with x86_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000018') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_64_32') + self.assertEqual(is_next_build, False) + + def testX86_Auto_64_32Translate(self): + """Test for an auto build with x86_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000058') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME_AUTO') + self.assertEqual(abi, 'x86_64_32') + self.assertEqual(is_next_build, False) + + def testWebviewTranslate(self): + """Test for a build with Webview.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000000', is_webview=True) + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'WEBVIEW_STABLE') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, False) + if __name__ == '__main__': unittest.main() diff --git a/build/util/branding.gni b/build/util/branding.gni index aa758e6a0e4b..1d4ffd68a75d 100644 --- 
a/build/util/branding.gni +++ b/build/util/branding.gni @@ -1,4 +1,4 @@ -# Copyright 2016 The Chromium Authors. All rights reserved. +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/util/chromium_git_revision.h.in b/build/util/chromium_git_revision.h.in new file mode 100644 index 000000000000..365961cdc943 --- /dev/null +++ b/build/util/chromium_git_revision.h.in @@ -0,0 +1,8 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// chromium_git_revision.h is generated from chromium_git_revision.h.in. Edit +// the source! + +#define CHROMIUM_GIT_REVISION "@@LASTCHANGE@" diff --git a/build/util/generate_wrapper.gni b/build/util/generate_wrapper.gni index 02e8bca1ff3a..e2ceccc9e3f0 100644 --- a/build/util/generate_wrapper.gni +++ b/build/util/generate_wrapper.gni @@ -1,4 +1,4 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -20,8 +20,6 @@ # build product. Paths can be relative to the containing gn file # or source-absolute. # executable_args: List of arguments to write into the wrapper. -# use_vpython3: If true, invoke the generated wrapper with vpython3 instead -# of vpython. # # Example wrapping a checked-in script: # generate_wrapper("sample_wrapper") { @@ -43,38 +41,28 @@ # wrapper_script = "$root_build_dir/bin/run_sample_build_product" # } template("generate_wrapper") { - _generator_script = "//build/util/generate_wrapper.py" - if (defined(invoker.generator_script)) { - _generator_script = invoker.generator_script - } - _executable_to_wrap = invoker.executable - _wrapper_script = invoker.wrapper_script - if (is_win) { - _wrapper_script += ".bat" - } - if (defined(invoker.executable_args)) { - _wrapped_arguments = invoker.executable_args - } else { - _wrapped_arguments = [] - } - action(target_name) { - forward_variables_from(invoker, - TESTONLY_AND_VISIBILITY + [ - "data", - "data_deps", - "deps", - "sources", - ]) - script = _generator_script - if (!defined(data)) { - data = [] + if (defined(invoker.generator_script)) { + script = invoker.generator_script + } else { + script = "//build/util/generate_wrapper.py" + } + _wrapper_script = invoker.wrapper_script + if (is_win) { + _wrapper_script += ".bat" + } + + data = [ + _wrapper_script, + "//.vpython3", + ] + if (defined(invoker.data)) { + data += invoker.data } - data += [ _wrapper_script ] outputs = [ _wrapper_script ] _rebased_executable_to_wrap = - rebase_path(_executable_to_wrap, root_build_dir) + rebase_path(invoker.executable, root_build_dir) _rebased_wrapper_script = rebase_path(_wrapper_script, root_build_dir) if (is_win) { _script_language = "batch" @@ -92,14 +80,19 @@ template("generate_wrapper") { _script_language, ] - if (defined(invoker.use_vpython3) && invoker.use_vpython3) { - args += [ "--use-vpython3" ] + if (defined(invoker.executable_args)) { + args += [ "--" ] + invoker.executable_args } - args += [ "--" ] - args += _wrapped_arguments - if (defined(invoker.write_runtime_deps)) { - write_runtime_deps = invoker.write_runtime_deps - } + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ + "data", + "executable", + "executable_args", + "generator_script", + "wrapper_script", + ]) } } 
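The generate_wrapper.py change that follows bakes a signal-forwarding run loop into every generated wrapper, so that terminating the wrapper also terminates the wrapped process. A minimal standalone sketch of that pattern, assuming a POSIX host (the run_command name and the fallback sleep command are illustrative only; the real template below additionally forwards SIGBREAK as CTRL_BREAK_EVENT on Windows):

    #!/usr/bin/env python3
    # Sketch: relay SIGINT/SIGTERM to a child process and poll until it exits.
    import signal
    import subprocess
    import sys
    import time


    def run_command(cmd):
      proc = subprocess.Popen(cmd)

      def _sig_handler(sig, _frame):
        # Forward the signal only if the child is still running.
        if proc.poll() is None:
          proc.send_signal(sig)

      signal.signal(signal.SIGTERM, _sig_handler)
      signal.signal(signal.SIGINT, _sig_handler)

      # Poll rather than wait() so the handlers get a chance to run.
      while proc.poll() is None:
        time.sleep(0.1)
      return proc.returncode


    if __name__ == '__main__':
      sys.exit(run_command(sys.argv[1:] or ['sleep', '5']))
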
diff --git a/build/util/generate_wrapper.py b/build/util/generate_wrapper.py index 07167e865566..b45f5f3bf9af 100755 --- a/build/util/generate_wrapper.py +++ b/build/util/generate_wrapper.py @@ -1,5 +1,5 @@ -#!/usr/bin/env vpython -# Copyright 2019 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -15,7 +15,7 @@ # The interpreter doesn't know about the script, so we have bash # inject the script location. BASH_TEMPLATE = textwrap.dedent("""\ - #!/usr/bin/env {vpython} + #!/usr/bin/env vpython3 _SCRIPT_LOCATION = __file__ {script} """) @@ -27,7 +27,7 @@ # directly. BATCH_TEMPLATE = textwrap.dedent("""\ @SETLOCAL ENABLEDELAYEDEXPANSION \ - & {vpython}.bat -x "%~f0" %* \ + & vpython3.bat -x "%~f0" %* \ & EXIT /B !ERRORLEVEL! _SCRIPT_LOCATION = __file__ {script} @@ -43,8 +43,11 @@ PY_TEMPLATE = textwrap.dedent("""\ import os import re + import shlex + import signal import subprocess import sys + import time _WRAPPED_PATH_RE = re.compile(r'@WrappedPath\(([^)]+)\)') _PATH_TO_OUTPUT_DIR = '{path_to_output_dir}' @@ -104,6 +107,10 @@ def FindIsolatedOutdir(raw_args): outdir = os.environ['ISOLATED_OUTDIR'] return outdir, remaining_args + def InsertWrapperScriptArgs(args): + if '--wrapper-script-args' in args: + idx = args.index('--wrapper-script-args') + args.insert(idx + 1, shlex.join(sys.argv)) def FilterIsolatedOutdirBasedArgs(outdir, args): rargs = [] @@ -137,17 +144,51 @@ def FilterIsolatedOutdirBasedArgs(outdir, args): i += 1 return rargs + def ForwardSignals(proc): + def _sig_handler(sig, _): + if proc.poll() is not None: + return + # SIGBREAK is defined only for win32. + # pylint: disable=no-member + if sys.platform == 'win32' and sig == signal.SIGBREAK: + print("Received signal(%d), sending CTRL_BREAK_EVENT to process %d" % (sig, proc.pid)) + proc.send_signal(signal.CTRL_BREAK_EVENT) + else: + print("Forwarding signal(%d) to process %d" % (sig, proc.pid)) + proc.send_signal(sig) + # pylint: enable=no-member + if sys.platform == 'win32': + signal.signal(signal.SIGBREAK, _sig_handler) # pylint: disable=no-member + else: + signal.signal(signal.SIGTERM, _sig_handler) + signal.signal(signal.SIGINT, _sig_handler) + + def Popen(*args, **kwargs): + assert 'creationflags' not in kwargs + if sys.platform == 'win32': + # Necessary for signal handling. See crbug.com/733612#c6. 
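+      # (CTRL_BREAK_EVENT can only be delivered to a process group, so the
+      # child gets its own group for ForwardSignals to reach it.)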
+ kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP + return subprocess.Popen(*args, **kwargs) + + def RunCommand(cmd): + process = Popen(cmd) + ForwardSignals(process) + while process.poll() is None: + time.sleep(0.1) + return process.returncode + def main(raw_args): executable_path = ExpandWrappedPath('{executable_path}') outdir, remaining_args = FindIsolatedOutdir(raw_args) args = {executable_args} + InsertWrapperScriptArgs(args) args = FilterIsolatedOutdirBasedArgs(outdir, args) executable_args = ExpandWrappedPaths(args) - cmd = [executable_path] + args + remaining_args + cmd = [executable_path] + executable_args + remaining_args if executable_path.endswith('.py'): cmd = [sys.executable] + cmd - return subprocess.call(cmd) + return RunCommand(cmd) if __name__ == '__main__': @@ -172,8 +213,7 @@ def Wrap(args): executable_path=str(args.executable), executable_args=str(args.executable_args)) template = SCRIPT_TEMPLATES[args.script_language] - wrapper_script.write( - template.format(script=py_contents, vpython=args.vpython)) + wrapper_script.write(template.format(script=py_contents)) os.chmod(args.wrapper_script, 0o750) return 0 @@ -195,12 +235,6 @@ def CreateArgumentParser(): '--script-language', choices=SCRIPT_TEMPLATES.keys(), help='Language in which the wrapper script will be written.') - parser.add_argument('--use-vpython3', - dest='vpython', - action='store_const', - const='vpython3', - default='vpython', - help='Use vpython3 instead of vpython') parser.add_argument( 'executable_args', nargs='*', help='Arguments to wrap into the executable.') diff --git a/build/util/java_action.gni b/build/util/java_action.gni index 0615b38782fb..81f6da5e18b9 100644 --- a/build/util/java_action.gni +++ b/build/util/java_action.gni @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/util/java_action.py b/build/util/java_action.py index ed9bb601de7b..6382dc23bf30 100755 --- a/build/util/java_action.py +++ b/build/util/java_action.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2015 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/util/lastchange.gni b/build/util/lastchange.gni index a13295900df2..909b9789ff73 100644 --- a/build/util/lastchange.gni +++ b/build/util/lastchange.gni @@ -1,4 +1,4 @@ -# Copyright 2018 The Chromium Authors. All rights reserved. +# Copyright 2018 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/util/lastchange.py b/build/util/lastchange.py index 874870ad56a5..98a6360b4687 100755 --- a/build/util/lastchange.py +++ b/build/util/lastchange.py @@ -1,15 +1,15 @@ -#!/usr/bin/env python -# Copyright (c) 2012 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ lastchange.py -- Chromium revision fetching utility. 
""" -from __future__ import print_function import argparse import collections +import datetime import logging import os import subprocess @@ -220,12 +220,22 @@ def main(argv=None): parser.add_argument("-m", "--version-macro", help=("Name of C #define when using --header. Defaults to " "LAST_CHANGE.")) - parser.add_argument("-o", "--output", metavar="FILE", - help=("Write last change to FILE. " - "Can be combined with --header to write both files.")) - parser.add_argument("--header", metavar="FILE", - help=("Write last change to FILE as a C/C++ header. " - "Can be combined with --output to write both files.")) + parser.add_argument("-o", + "--output", + metavar="FILE", + help=("Write last change to FILE. " + "Can be combined with other file-output-related " + "options to write multiple files.")) + parser.add_argument("--header", + metavar="FILE", + help=("Write last change to FILE as a C/C++ header. " + "Can be combined with other file-output-related " + "options to write multiple files.")) + parser.add_argument("--revision", + metavar="FILE", + help=("Write last change to FILE as a one-line revision. " + "Can be combined with other file-output-related " + "options to write multiple files.")) parser.add_argument("--merge-base-ref", default=None, help=("Only consider changes since the merge " @@ -234,6 +244,9 @@ def main(argv=None): help=("Output the revision as a VCS revision ID only (in " "Git, a 40-character commit hash, excluding the " "Cr-Commit-Position).")) + parser.add_argument("--revision-id-prefix", + metavar="PREFIX", + help=("Adds a string prefix to the VCS revision ID.")) parser.add_argument("--print-only", action="store_true", help=("Just print the revision string. Overrides any " "file-output-related options.")) @@ -251,6 +264,7 @@ def main(argv=None): out_file = args.output header = args.header + revision = args.revision commit_filter=args.filter while len(extras) and out_file is None: @@ -294,11 +308,20 @@ def main(argv=None): if args.revision_id_only: revision_string = version_info.revision_id + if args.revision_id_prefix: + revision_string = args.revision_id_prefix + revision_string + if args.print_only: print(revision_string) else: - contents = "LASTCHANGE=%s\n" % revision_string - if not out_file and not args.header: + lastchange_year = datetime.datetime.utcfromtimestamp( + version_info.timestamp).year + contents_lines = [ + "LASTCHANGE=%s" % revision_string, + "LASTCHANGE_YEAR=%s" % lastchange_year, + ] + contents = '\n'.join(contents_lines) + '\n' + if not out_file and not header and not revision: sys.stdout.write(contents) else: if out_file: @@ -311,6 +334,8 @@ def main(argv=None): WriteIfChanged(header, GetHeaderContents(header, args.version_macro, revision_string)) + if revision: + WriteIfChanged(revision, revision_string) return 0 diff --git a/build/util/lib/__init__.py b/build/util/lib/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/build/util/lib/common/PRESUBMIT.py b/build/util/lib/common/PRESUBMIT.py index 53984fde248f..b0477fd589a5 100644 --- a/build/util/lib/common/PRESUBMIT.py +++ b/build/util/lib/common/PRESUBMIT.py @@ -1,11 +1,19 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
+USE_PYTHON3 = True + + def _RunTests(input_api, output_api): return (input_api.canned_checks.RunUnitTestsInDirectory( - input_api, output_api, '.', files_to_check=[r'.+_test.py$'])) + input_api, + output_api, + '.', + files_to_check=[r'.+_test.py$'], + run_on_python2=False, + skip_shebang_check=True)) def CheckChangeOnUpload(input_api, output_api): diff --git a/build/util/lib/common/chrome_test_server_spawner.py b/build/util/lib/common/chrome_test_server_spawner.py index 9810215e8116..62788f2b0291 100644 --- a/build/util/lib/common/chrome_test_server_spawner.py +++ b/build/util/lib/common/chrome_test_server_spawner.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -38,14 +38,9 @@ # Path that are needed to import necessary modules when launching a testserver. -os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s' - % (os.path.join(_DIR_SOURCE_ROOT, 'third_party'), - os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'tlslite'), - os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'))) - - -# The timeout (in seconds) of starting up the Python test server. -_TEST_SERVER_STARTUP_TIMEOUT = 10 +os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + ( + ':%s:%s' % (os.path.join(_DIR_SOURCE_ROOT, 'third_party'), + os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'))) def _GetServerTypeCommandLine(server_type): @@ -108,30 +103,25 @@ def __init__(self, ready_event, arguments, port_forwarder): self.port_forwarder = port_forwarder self.test_server_process = None self.is_ready = False - self.host_port = self.arguments['port'] + self.host_port = 0 self.host_ocsp_port = 0 assert isinstance(self.host_port, int) # The forwarder device port now is dynamically allocated. self.forwarder_device_port = 0 self.forwarder_ocsp_device_port = 0 - # Anonymous pipe in order to get port info from test server. - self.pipe_in = None - self.pipe_out = None self.process = None self.command_line = [] - def _WaitToStartAndGetPortFromTestServer(self): + def _WaitToStartAndGetPortFromTestServer(self, pipe_in): """Waits for the Python test server to start and gets the port it is using. The port information is passed by the Python test server with a pipe given - by self.pipe_out. It is written as a result to |self.host_port|. + by |pipe_in|. It is written as a result to |self.host_port|. Returns: Whether the port used by the test server was successfully fetched. """ - assert self.host_port == 0 and self.pipe_out and self.pipe_in - (in_fds, _, _) = select.select([self.pipe_in, ], [], [], - _TEST_SERVER_STARTUP_TIMEOUT) + (in_fds, _, _) = select.select([pipe_in], [], []) if len(in_fds) == 0: _logger.error('Failed to wait to the Python test server to be started.') return False @@ -141,14 +131,14 @@ def _WaitToStartAndGetPortFromTestServer(self): # configured to use little-endian. # TODO(jnd): Change the Python test server and local_test_server_*.cc to # use a unified byte order (either big-endian or little-endian). 
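    # The handshake is a 4-byte length prefix followed by that many bytes
    # of JSON describing the port(s) the server actually bound.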
- data_length = os.read(self.pipe_in, struct.calcsize('=L')) + data_length = os.read(pipe_in, struct.calcsize('=L')) if data_length: (data_length,) = struct.unpack('=L', data_length) assert data_length if not data_length: _logger.error('Failed to get length of server data.') return False - server_data_json = os.read(self.pipe_in, data_length) + server_data_json = os.read(pipe_in, data_length) if not server_data_json: _logger.error('Failed to get server data.') return False @@ -173,7 +163,7 @@ def _WaitToStartAndGetPortFromTestServer(self): return self.port_forwarder.WaitPortNotAvailable(self.host_port) - def _GenerateCommandLineArguments(self): + def _GenerateCommandLineArguments(self, pipe_out): """Generates the command line to run the test server. Note that all options are processed by following the definitions in @@ -189,15 +179,11 @@ def _GenerateCommandLineArguments(self): if type_cmd: self.command_line.append(type_cmd) - # Use a pipe to get the port given by the instance of Python test server - # if the test does not specify the port. - assert self.host_port == args_copy['port'] - if self.host_port == 0: - (self.pipe_in, self.pipe_out) = os.pipe() - self.command_line.append('--startup-pipe=%d' % self.pipe_out) + # Use a pipe to get the port given by the Python test server. + self.command_line.append('--startup-pipe=%d' % pipe_out) # Pass the remaining arguments as-is. - for key, values in args_copy.iteritems(): + for key, values in args_copy.items(): if not isinstance(values, list): values = [values] for value in values: @@ -206,12 +192,15 @@ def _GenerateCommandLineArguments(self): else: self.command_line.append('--%s=%s' % (key, value)) - def _CloseUnnecessaryFDsForTestServerProcess(self): + def _CloseUnnecessaryFDsForTestServerProcess(self, pipe_out): # This is required to avoid subtle deadlocks that could be caused by the # test server child process inheriting undesirable file descriptors such as - # file lock file descriptors. - for fd in xrange(0, 1024): - if fd != self.pipe_out: + # file lock file descriptors. Note stdin, stdout, and stderr (0-2) are left + # alone and redirected with subprocess.Popen. It is important to leave those + # fds filled, or the test server will accidentally open other fds at those + # numbers. + for fd in range(3, 1024): + if fd != pipe_out: try: os.close(fd) except: @@ -220,63 +209,86 @@ def _CloseUnnecessaryFDsForTestServerProcess(self): def run(self): _logger.info('Start running the thread!') self.wait_event.clear() - self._GenerateCommandLineArguments() - command = [sys.executable, - os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver', - 'testserver.py')] + self.command_line - _logger.info('Running: %s', command) - - # Disable PYTHONUNBUFFERED because it has a bad interaction with the - # testserver. Remove once this interaction is fixed. - unbuf = os.environ.pop('PYTHONUNBUFFERED', None) - - # Pass _DIR_SOURCE_ROOT as the child's working directory so that relative - # paths in the arguments are resolved correctly. 
- self.process = subprocess.Popen( - command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess, - cwd=_DIR_SOURCE_ROOT) - if unbuf: - os.environ['PYTHONUNBUFFERED'] = unbuf - if self.process: - if self.pipe_out: - self.is_ready = self._WaitToStartAndGetPortFromTestServer() - else: - self.is_ready = self.port_forwarder.WaitPortNotAvailable(self.host_port) - - if self.is_ready: - port_map = [(0, self.host_port)] - if self.host_ocsp_port: - port_map.extend([(0, self.host_ocsp_port)]) - self.port_forwarder.Map(port_map) - - self.forwarder_device_port = \ - self.port_forwarder.GetDevicePortForHostPort(self.host_port) - if self.host_ocsp_port: - self.forwarder_ocsp_device_port = \ - self.port_forwarder.GetDevicePortForHostPort(self.host_ocsp_port) - - # Check whether the forwarder is ready on the device. - self.is_ready = self.forwarder_device_port and \ - self.port_forwarder.WaitDevicePortReady(self.forwarder_device_port) - - # Wake up the request handler thread. - self.ready_event.set() - # Keep thread running until Stop() gets called. - self.stop_event.wait() - if self.process.poll() is None: - self.process.kill() - # Wait for process to actually terminate. - # (crbug.com/946475) - self.process.wait() - self.port_forwarder.Unmap(self.forwarder_device_port) - self.process = None - self.is_ready = False - if self.pipe_out: - os.close(self.pipe_in) - os.close(self.pipe_out) - self.pipe_in = None - self.pipe_out = None + # Set up a pipe for the server to report when it has started. + pipe_in, pipe_out = os.pipe() + + # TODO(crbug.com/941669): Remove if condition after python3 migration. + if hasattr(os, 'set_inheritable'): + os.set_inheritable(pipe_out, True) + + try: + self._GenerateCommandLineArguments(pipe_out) + # TODO(crbug.com/941669): When this script is ported to Python 3, replace + # 'vpython3' below with sys.executable. + command = [ + 'vpython3', + os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver', + 'testserver.py') + ] + self.command_line + _logger.info('Running: %s', command) + + # Disable PYTHONUNBUFFERED because it has a bad interaction with the + # testserver. Remove once this interaction is fixed. + unbuf = os.environ.pop('PYTHONUNBUFFERED', None) + + # Pass _DIR_SOURCE_ROOT as the child's working directory so that relative + # paths in the arguments are resolved correctly. devnull can be replaced + # with subprocess.DEVNULL in Python 3. + with open(os.devnull, 'r+b') as devnull: + self.process = subprocess.Popen( + command, + preexec_fn=lambda: self._CloseUnnecessaryFDsForTestServerProcess( + pipe_out), + stdin=devnull, + # Preserve stdout and stderr from the test server. + stdout=None, + stderr=None, + cwd=_DIR_SOURCE_ROOT, + close_fds=False) + + # Close pipe_out early. If self.process crashes, this will be visible + # in _WaitToStartAndGetPortFromTestServer's select loop. + os.close(pipe_out) + pipe_out = -1 + if unbuf: + os.environ['PYTHONUNBUFFERED'] = unbuf + self.is_ready = self._WaitToStartAndGetPortFromTestServer(pipe_in) + + if self.is_ready: + port_map = [(0, self.host_port)] + if self.host_ocsp_port: + port_map.extend([(0, self.host_ocsp_port)]) + self.port_forwarder.Map(port_map) + + self.forwarder_device_port = \ + self.port_forwarder.GetDevicePortForHostPort(self.host_port) + if self.host_ocsp_port: + self.forwarder_ocsp_device_port = \ + self.port_forwarder.GetDevicePortForHostPort(self.host_ocsp_port) + + # Check whether the forwarder is ready on the device. 
+ self.is_ready = self.forwarder_device_port and \ + self.port_forwarder.WaitDevicePortReady(self.forwarder_device_port) + + # Wake up the request handler thread. + self.ready_event.set() + # Keep thread running until Stop() gets called. + self.stop_event.wait() + if self.process.poll() is None: + self.process.kill() + # Wait for process to actually terminate. + # (crbug.com/946475) + self.process.wait() + + self.port_forwarder.Unmap(self.forwarder_device_port) + self.process = None + self.is_ready = False + finally: + if pipe_in >= 0: + os.close(pipe_in) + if pipe_out >= 0: + os.close(pipe_out) _logger.info('Test-server has died.') self.wait_event.set() @@ -313,16 +325,16 @@ def _SendResponse(self, response_code, response_reason, additional_headers, for header_name in additional_headers: self.send_header(header_name, additional_headers[header_name]) self.end_headers() - self.wfile.write(contents) + self.wfile.write(contents.encode('utf8')) self.wfile.flush() def _StartTestServer(self): """Starts the test server thread.""" _logger.info('Handling request to spawn a test server.') - content_type = self.headers.getheader('content-type') + content_type = self.headers.get('content-type') if content_type != 'application/json': raise Exception('Bad content-type for start request.') - content_length = self.headers.getheader('content-length') + content_length = self.headers.get('content-length') if not content_length: content_length = 0 try: diff --git a/build/util/lib/common/perf_result_data_type.py b/build/util/lib/common/perf_result_data_type.py index 67b550a46c05..a75c916d793c 100644 --- a/build/util/lib/common/perf_result_data_type.py +++ b/build/util/lib/common/perf_result_data_type.py @@ -1,4 +1,4 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/util/lib/common/perf_tests_results_helper.py b/build/util/lib/common/perf_tests_results_helper.py index 153886dce568..8246e206c0b1 100644 --- a/build/util/lib/common/perf_tests_results_helper.py +++ b/build/util/lib/common/perf_tests_results_helper.py @@ -1,8 +1,7 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import re import sys diff --git a/build/util/lib/common/unittest_util.py b/build/util/lib/common/unittest_util.py index 9683ab717a8f..4779c7d05bfa 100644 --- a/build/util/lib/common/unittest_util.py +++ b/build/util/lib/common/unittest_util.py @@ -1,4 +1,4 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -95,7 +95,7 @@ def GetTestName(test): def FilterTestSuite(suite, gtest_filter): """Returns a new filtered tests suite based on the given gtest filter. - See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md + See https://github.com/google/googletest/blob/main/docs/advanced.md for gtest_filter specification. """ return unittest.TestSuite(FilterTests(GetTestsFromSuite(suite), gtest_filter)) @@ -119,7 +119,7 @@ def FilterTests(all_tests, gtest_filter): def FilterTestNames(all_tests, gtest_filter): """Filter a list of test names based on the given gtest filter. 
- See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md + See https://github.com/google/googletest/blob/main/docs/advanced.md for gtest_filter specification. Args: diff --git a/build/util/lib/common/unittest_util_test.py b/build/util/lib/common/unittest_util_test.py index 1514c9b6d4c0..e775e1a5ebc1 100755 --- a/build/util/lib/common/unittest_util_test.py +++ b/build/util/lib/common/unittest_util_test.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2015 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/util/lib/common/util.py b/build/util/lib/common/util.py index a415b1f534b4..33c58e237b69 100644 --- a/build/util/lib/common/util.py +++ b/build/util/lib/common/util.py @@ -1,4 +1,4 @@ -# Copyright 2013 The Chromium Authors. All rights reserved. +# Copyright 2013 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/util/lib/results/DIR_METADATA b/build/util/lib/results/DIR_METADATA new file mode 100644 index 000000000000..aea61c0bffb5 --- /dev/null +++ b/build/util/lib/results/DIR_METADATA @@ -0,0 +1,11 @@ +# Metadata information for this directory. +# +# For more information on DIR_METADATA files, see: +# https://source.chromium.org/chromium/infra/infra/+/main:go/src/infra/tools/dirmd/README.md +# +# For the schema of this file, see Metadata message: +# https://source.chromium.org/chromium/infra/infra/+/main:go/src/infra/tools/dirmd/proto/dir_metadata.proto + +monorail { + component: "Infra>Client>Chrome" +} diff --git a/build/util/lib/results/OWNERS b/build/util/lib/results/OWNERS new file mode 100644 index 000000000000..b2f010585605 --- /dev/null +++ b/build/util/lib/results/OWNERS @@ -0,0 +1 @@ +bjoyce@chromium.org diff --git a/build/util/lib/results/__init__.py b/build/util/lib/results/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/build/android/pylib/base/result_sink.py b/build/util/lib/results/result_sink.py similarity index 50% rename from build/android/pylib/base/result_sink.py rename to build/util/lib/results/result_sink.py index 7aae1b7a740c..3996b65d8d4d 100644 --- a/build/android/pylib/base/result_sink.py +++ b/build/util/lib/results/result_sink.py @@ -1,26 +1,27 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. +# Copyright 2020 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from __future__ import absolute_import import base64 import json +import logging import os import six -from pylib.base import base_test_result import requests # pylint: disable=import-error +from lib.results import result_types -# Maps base_test_results to the luci test-result.proto. +# Maps result_types to the luci test-result.proto. 
# https://godoc.org/go.chromium.org/luci/resultdb/proto/v1#TestStatus RESULT_MAP = { - base_test_result.ResultType.UNKNOWN: 'ABORT', - base_test_result.ResultType.PASS: 'PASS', - base_test_result.ResultType.FAIL: 'FAIL', - base_test_result.ResultType.CRASH: 'CRASH', - base_test_result.ResultType.TIMEOUT: 'ABORT', - base_test_result.ResultType.SKIP: 'SKIP', - base_test_result.ResultType.NOTRUN: 'SKIP', + result_types.UNKNOWN: 'ABORT', + result_types.PASS: 'PASS', + result_types.FAIL: 'FAIL', + result_types.CRASH: 'CRASH', + result_types.TIMEOUT: 'ABORT', + result_types.SKIP: 'SKIP', + result_types.NOTRUN: 'SKIP', } @@ -46,19 +47,40 @@ class ResultSinkClient(object): This assumes that the rdb stream has been called already and that the server is listening. """ + def __init__(self, context): base_url = 'http://%s/prpc/luci.resultsink.v1.Sink' % context['address'] self.test_results_url = base_url + '/ReportTestResults' self.report_artifacts_url = base_url + '/ReportInvocationLevelArtifacts' - self.headers = { + headers = { 'Content-Type': 'application/json', 'Accept': 'application/json', 'Authorization': 'ResultSink %s' % context['auth_token'], } - - def Post(self, test_id, status, duration, test_log, test_file, - artifacts=None): + self.session = requests.Session() + self.session.headers.update(headers) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def close(self): + """Closes the session backing the sink.""" + self.session.close() + + def Post(self, + test_id, + status, + duration, + test_log, + test_file, + variant=None, + artifacts=None, + failure_reason=None, + html_artifact=None): """Uploads the test result to the ResultSink server. This assumes that the rdb stream has been called already and that @@ -70,14 +92,21 @@ def Post(self, test_id, status, duration, test_log, test_file, duration: An int representing time in ms. test_log: A string representing the test's output. test_file: A string representing the file location of the test. + variant: An optional dict of variant key value pairs as the + additional variant sent from test runners, which can override + or add to the variants passed to `rdb stream` command. artifacts: An optional dict of artifacts to attach to the test. + failure_reason: An optional string with the reason why the test failed. + Should be None if the test did not fail. + html_artifact: An optional html-formatted string to prepend to the test's + log. Useful to encode click-able URL links in the test log, since that + won't be formatted in the test_log. Returns: N/A """ assert status in RESULT_MAP - expected = status in (base_test_result.ResultType.PASS, - base_test_result.ResultType.SKIP) + expected = status in (result_types.PASS, result_types.SKIP) result_db_status = RESULT_MAP[status] tr = { @@ -92,22 +121,33 @@ def Post(self, test_id, status, duration, test_log, test_file, }, { # Status before getting mapped to result_db statuses. - 'key': 'android_test_runner_status', + 'key': 'raw_status', 'value': status, } ], 'testId': test_id, + 'testMetadata': { + 'name': test_id, + } } + if variant: + tr['variant'] = {'def': variant} + artifacts = artifacts or {} + tr['summaryHtml'] = html_artifact if html_artifact else '' if test_log: # Upload the original log without any modifications. 
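      # base64 keeps arbitrary log bytes JSON-safe in the artifact payload.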
b64_log = six.ensure_str(base64.b64encode(six.ensure_binary(test_log))) artifacts.update({'Test Log': {'contents': b64_log}}) - tr['summaryHtml'] = '' + tr['summaryHtml'] += '' if artifacts: tr['artifacts'] = artifacts + if failure_reason: + tr['failureReason'] = { + 'primaryErrorMessage': _TruncateToUTF8Bytes(failure_reason, 1024) + } if duration is not None: # Duration must be formatted to avoid scientific notation in case @@ -116,17 +156,13 @@ def Post(self, test_id, status, duration, test_log, test_file, tr['duration'] = '%.9fs' % float(duration / 1000.0) if test_file and str(test_file).startswith('//'): - tr['testMetadata'] = { - 'name': test_id, - 'location': { - 'file_name': test_file, - 'repo': 'https://chromium.googlesource.com/chromium/src', - } + tr['testMetadata']['location'] = { + 'file_name': test_file, + 'repo': 'https://chromium.googlesource.com/chromium/src', } - res = requests.post(url=self.test_results_url, - headers=self.headers, - data=json.dumps({'testResults': [tr]})) + res = self.session.post(url=self.test_results_url, + data=json.dumps({'testResults': [tr]})) res.raise_for_status() def ReportInvocationLevelArtifacts(self, artifacts): @@ -139,7 +175,33 @@ def ReportInvocationLevelArtifacts(self, artifacts): artifacts: A dict of artifacts to attach to the invocation. """ req = {'artifacts': artifacts} - res = requests.post(url=self.report_artifacts_url, - headers=self.headers, - data=json.dumps(req)) + res = self.session.post(url=self.report_artifacts_url, data=json.dumps(req)) res.raise_for_status() + + +def _TruncateToUTF8Bytes(s, length): + """ Truncates a string to a given number of bytes when encoded as UTF-8. + + Ensures the given string does not take more than length bytes when encoded + as UTF-8. Adds trailing ellipsis (...) if truncation occurred. A truncated + string may end up encoding to a length slightly shorter than length because + only whole Unicode codepoints are dropped. + + Args: + s: The string to truncate. + length: the length (in bytes) to truncate to. + """ + try: + encoded = s.encode('utf-8') + # When encode throws UnicodeDecodeError in py2, it usually means the str is + # already encoded and has non-ascii chars. So skip re-encoding it. + except UnicodeDecodeError: + encoded = s + if len(encoded) > length: + # Truncate, leaving space for trailing ellipsis (...). + encoded = encoded[:length - 3] + # Truncating the string encoded as UTF-8 may have left the final codepoint + # only partially present. Pass 'ignore' to acknowledge and ensure this is + # dropped. + return encoded.decode('utf-8', 'ignore') + "..." + return s diff --git a/build/util/lib/results/result_sink_test.py b/build/util/lib/results/result_sink_test.py new file mode 100755 index 000000000000..7d65677776b7 --- /dev/null +++ b/build/util/lib/results/result_sink_test.py @@ -0,0 +1,138 @@ +#!/usr/bin/env vpython3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import json +import os +import sys +import unittest + +# The following non-std imports are fetched via vpython. 
See the list at +# //.vpython3 +import mock # pylint: disable=import-error +import six + +_BUILD_UTIL_PATH = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..')) +if _BUILD_UTIL_PATH not in sys.path: + sys.path.insert(0, _BUILD_UTIL_PATH) + +from lib.results import result_sink +from lib.results import result_types + +_FAKE_CONTEXT = { + 'address': 'some-ip-address', + 'auth_token': 'some-auth-token', +} + + +class InitClientTest(unittest.TestCase): + @mock.patch.dict(os.environ, {}, clear=True) + def testEmptyClient(self): + # No LUCI_CONTEXT env var should prevent a client from being created. + client = result_sink.TryInitClient() + self.assertIsNone(client) + + @mock.patch.dict(os.environ, {'LUCI_CONTEXT': 'some-file.json'}) + def testBasicClient(self): + luci_context_json = { + 'result_sink': _FAKE_CONTEXT, + } + if six.PY2: + open_builtin_path = '__builtin__.open' + else: + open_builtin_path = 'builtins.open' + with mock.patch(open_builtin_path, + mock.mock_open(read_data=json.dumps(luci_context_json))): + client = result_sink.TryInitClient() + self.assertEqual( + client.test_results_url, + 'http://some-ip-address/prpc/luci.resultsink.v1.Sink/ReportTestResults') + self.assertEqual(client.session.headers['Authorization'], + 'ResultSink some-auth-token') + + @mock.patch('requests.Session') + def testReuseSession(self, mock_session): + client = result_sink.ResultSinkClient(_FAKE_CONTEXT) + client.Post('some-test', result_types.PASS, 0, 'some-test-log', None) + client.Post('some-test', result_types.PASS, 0, 'some-test-log', None) + self.assertEqual(mock_session.call_count, 1) + self.assertEqual(client.session.post.call_count, 2) + + @mock.patch('requests.Session.close') + def testCloseClient(self, mock_close): + client = result_sink.ResultSinkClient(_FAKE_CONTEXT) + client.close() + mock_close.assert_called_once() + + @mock.patch('requests.Session.close') + def testClientAsContextManager(self, mock_close): + with result_sink.ResultSinkClient(_FAKE_CONTEXT) as client: + mock_close.assert_not_called() + mock_close.assert_called_once() + + +class ClientTest(unittest.TestCase): + def setUp(self): + self.client = result_sink.ResultSinkClient(_FAKE_CONTEXT) + + @mock.patch('requests.Session.post') + def testPostPassingTest(self, mock_post): + self.client.Post('some-test', result_types.PASS, 0, 'some-test-log', None) + self.assertEqual( + mock_post.call_args[1]['url'], + 'http://some-ip-address/prpc/luci.resultsink.v1.Sink/ReportTestResults') + data = json.loads(mock_post.call_args[1]['data']) + self.assertEqual(data['testResults'][0]['testId'], 'some-test') + self.assertEqual(data['testResults'][0]['status'], 'PASS') + + @mock.patch('requests.Session.post') + def testPostFailingTest(self, mock_post): + self.client.Post('some-test', + result_types.FAIL, + 0, + 'some-test-log', + None, + failure_reason='omg test failure') + data = json.loads(mock_post.call_args[1]['data']) + self.assertEqual(data['testResults'][0]['status'], 'FAIL') + self.assertEqual(data['testResults'][0]['testMetadata']['name'], + 'some-test') + self.assertEqual( + data['testResults'][0]['failureReason']['primaryErrorMessage'], + 'omg test failure') + + @mock.patch('requests.Session.post') + def testPostWithTestFile(self, mock_post): + self.client.Post('some-test', result_types.PASS, 0, 'some-test-log', + '//some/test.cc') + data = json.loads(mock_post.call_args[1]['data']) + self.assertEqual( + data['testResults'][0]['testMetadata']['location']['file_name'], + '//some/test.cc') + 
self.assertEqual(data['testResults'][0]['testMetadata']['name'], + 'some-test') + self.assertIsNotNone(data['testResults'][0]['summaryHtml']) + + @mock.patch('requests.Session.post') + def testPostWithVariant(self, mock_post): + self.client.Post('some-test', + result_types.PASS, + 0, + 'some-test-log', + None, + variant={ + 'key1': 'value1', + 'key2': 'value2' + }) + data = json.loads(mock_post.call_args[1]['data']) + self.assertEqual(data['testResults'][0]['variant'], + {'def': { + 'key1': 'value1', + 'key2': 'value2' + }}) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/util/lib/results/result_types.py b/build/util/lib/results/result_types.py new file mode 100644 index 000000000000..f8b52c173e0d --- /dev/null +++ b/build/util/lib/results/result_types.py @@ -0,0 +1,25 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Module containing base test results classes.""" + +# The test passed. +PASS = 'SUCCESS' + +# The test was intentionally skipped. +SKIP = 'SKIPPED' + +# The test failed. +FAIL = 'FAILURE' + +# The test caused the containing process to crash. +CRASH = 'CRASH' + +# The test timed out. +TIMEOUT = 'TIMEOUT' + +# The test ran, but we couldn't determine what happened. +UNKNOWN = 'UNKNOWN' + +# The test did not run. +NOTRUN = 'NOTRUN' diff --git a/build/util/process_version.gni b/build/util/process_version.gni index e1ccb9541894..cd9671ccdb82 100644 --- a/build/util/process_version.gni +++ b/build/util/process_version.gni @@ -1,7 +1,9 @@ -# Copyright 2014 The Chromium Authors. All rights reserved. +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +import("//build/util/lastchange.gni") + # Runs the version processing script over the given template file to produce # an output file. This is used for generating various forms of files that # incorporate the product name and version. @@ -64,7 +66,7 @@ template("process_version") { action(action_name) { script = "//build/util/version.py" - inputs = [] + inputs = [ lastchange_file ] if (defined(invoker.inputs)) { inputs += invoker.inputs } @@ -114,7 +116,11 @@ template("process_version") { if (!process_only) { source_set(source_set_name) { - forward_variables_from(invoker, [ "visibility" ]) + forward_variables_from(invoker, + [ + "visibility", + "deps", + ]) sources = get_target_outputs(":$action_name") public_deps = [ ":$action_name" ] } diff --git a/build/util/python2_action.py b/build/util/python2_action.py deleted file mode 100644 index 609665b00279..000000000000 --- a/build/util/python2_action.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2020 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. -"""Script for ensuring that a python action runs under Python2, not Python3.""" - -import subprocess -import sys - -if sys.version_info.major == 2: - # If we get here, we're already Python2, so just re-execute the - # command without the wrapper. - exe = sys.executable -elif sys.executable.endswith('.exe'): - # If we get here, we're a Python3 executable likely running on - # Windows, so look for the Python2 wrapper in depot_tools. We - # can't invoke it directly because some command lines might exceed the - # 8K commamand line length limit in cmd.exe, but we can use it to - # find the underlying executable, which we can then safely call. 
- exe = subprocess.check_output( - ['python.bat', '-c', - 'import sys; print(sys.executable)']).decode('utf8').strip() -else: - # If we get here, we are a Python3 executable. Hope that we can find - # a `python2.7` in path somewhere. - exe = 'python2.7' - -sys.exit(subprocess.call([exe] + sys.argv[1:])) diff --git a/build/util/version.gni b/build/util/version.gni deleted file mode 100644 index fb8715d32dc6..000000000000 --- a/build/util/version.gni +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This exposes the Chrome version as GN variables for use in build files. -# This also generates the various version codes used for builds of chrome for -# android. -# -# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively. -# However, it is far better to write an action (or use the process_version -# wrapper in build/util/version.gni) to generate a file at build-time with the -# information you need. This allows better dependency checking and GN will -# run faster. -# -# These values should only be used if you REALLY need to depend on them at -# build-time, for example, in the computation of output file names. - -# Give version.py a pattern that will expand to a GN scope consisting of -# all values we need at once. -_version_dictionary_template = "full = \"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\" " + - "major = \"@MAJOR@\" minor = \"@MINOR@\" " + - "build = \"@BUILD@\" patch = \"@PATCH@\" " - -# The file containing the Chrome version number. -chrome_version_file = "//chrome/VERSION" - -_script_arguments = [] - -if (is_mac) { - _version_dictionary_template += "patch_hi = @PATCH_HI@ patch_lo = @PATCH_LO@ " - - _script_arguments += [ - "-e", - "PATCH_HI=int(PATCH)//256", - "-e", - "PATCH_LO=int(PATCH)%256", - ] -} else if (target_os == "android") { - import("//build/config/android/config.gni") - - _version_dictionary_template += - "chrome_version_code = " + "\"@CHROME_VERSION_CODE@\" " + - "chrome_modern_version_code = \"@CHROME_MODERN_VERSION_CODE@\" " + - "monochrome_version_code = \"@MONOCHROME_VERSION_CODE@\" " + - "trichrome_version_code = \"@TRICHROME_VERSION_CODE@\" " + - "webview_stable_version_code = \"@WEBVIEW_STABLE_VERSION_CODE@\" " + - "webview_beta_version_code = \"@WEBVIEW_BETA_VERSION_CODE@\" " + - "webview_dev_version_code = \"@WEBVIEW_DEV_VERSION_CODE@\" " - - if (target_cpu == "arm64" || target_cpu == "x64") { - _version_dictionary_template += "monochrome_32_version_code = \"@MONOCHROME_32_VERSION_CODE@\" " + "monochrome_32_64_version_code = \"@MONOCHROME_32_64_VERSION_CODE@\" " + "monochrome_64_32_version_code = \"@MONOCHROME_64_32_VERSION_CODE@\" " + "monochrome_64_version_code = \"@MONOCHROME_64_VERSION_CODE@\" " + "trichrome_32_version_code = \"@TRICHROME_32_VERSION_CODE@\" " + "trichrome_32_64_version_code = \"@TRICHROME_32_64_VERSION_CODE@\" " + "trichrome_64_32_version_code = \"@TRICHROME_64_32_VERSION_CODE@\" " + "trichrome_64_version_code = \"@TRICHROME_64_VERSION_CODE@\" " + "webview_32_stable_version_code = \"@WEBVIEW_32_STABLE_VERSION_CODE@\" " + "webview_32_beta_version_code = \"@WEBVIEW_32_BETA_VERSION_CODE@\" " + "webview_32_dev_version_code = \"@WEBVIEW_32_DEV_VERSION_CODE@\" " - } - - _script_arguments += [ - "-a", - target_cpu, - ] - - if (defined(final_android_sdk) && !final_android_sdk) { - _script_arguments += [ "--next" ] - } -} - -_script_arguments += [ - "-f", - 
rebase_path(chrome_version_file, root_build_dir), - "-t", - _version_dictionary_template, - "--os", - target_os, -] - -_result = exec_script("version.py", - _script_arguments, - "scope", - [ - chrome_version_file, - "android_chrome_version.py", - ]) - -# Full version. For example "45.0.12321.0" -chrome_version_full = _result.full - -# The consituent parts of the full version. -chrome_version_major = _result.major -chrome_version_minor = _result.minor -chrome_version_build = _result.build -chrome_version_patch = _result.patch - -if (is_mac) { - chrome_version_patch_hi = _result.patch_hi - chrome_version_patch_lo = _result.patch_lo - - chrome_dylib_version = "$chrome_version_build.$chrome_version_patch_hi" + - ".$chrome_version_patch_lo" -} else if (target_os == "android") { - forward_variables_from(_result, - [ - "chrome_modern_version_code", - "chrome_version_code", - "monochrome_version_code", - "monochrome_32_version_code", - "monochrome_32_64_version_code", - "monochrome_64_32_version_code", - "monochrome_64_version_code", - "trichrome_version_code", - "trichrome_32_version_code", - "trichrome_32_64_version_code", - "trichrome_64_32_version_code", - "trichrome_64_version_code", - "webview_beta_version_code", - "webview_dev_version_code", - "webview_stable_version_code", - "webview_32_beta_version_code", - "webview_32_dev_version_code", - "webview_32_stable_version_code", - ]) - - chrome_version_name = chrome_version_full - - lines_to_write = [ - "VersionName: $chrome_version_name", - "Chrome: $chrome_version_code", - "ChromeModern: $chrome_modern_version_code", - "Monochrome: $monochrome_version_code", - "TrichromeChrome: $trichrome_version_code", - "AndroidWebviewStable: $webview_stable_version_code", - "AndroidWebviewBeta: $webview_beta_version_code", - "AndroidWebviewDev: $webview_dev_version_code", - ] - - if (target_cpu == "arm64" || target_cpu == "x64") { - lines_to_write += [ - "Monochrome32: $monochrome_32_version_code", - "Monochrome3264: $monochrome_32_64_version_code", - "Monochrome6432: $monochrome_64_32_version_code", - "Monochrome64: $monochrome_64_version_code", - "TrichromeChrome32: $trichrome_32_version_code", - "TrichromeChrome3264: $trichrome_32_64_version_code", - "TrichromeChrome6432: $trichrome_64_32_version_code", - "TrichromeChrome64: $trichrome_64_version_code", - "AndroidWebview32Stable: $webview_32_stable_version_code", - "AndroidWebview32Beta: $webview_32_beta_version_code", - "AndroidWebview32Dev: $webview_32_dev_version_code", - ] - } - - write_file("$root_out_dir/android_chrome_versions.txt", lines_to_write) -} diff --git a/build/util/version.py b/build/util/version.py index 4f440c4ee7b4..9bf51cd35122 100755 --- a/build/util/version.py +++ b/build/util/version.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2014 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,7 +7,6 @@ version.py -- Chromium version string substitution utility. """ -from __future__ import print_function import argparse import os @@ -26,9 +25,10 @@ def FetchValuesFromFile(values_dict, file_name): The file must exist, otherwise you get the Python exception from open(). 
""" - for line in open(file_name, 'r').readlines(): - key, val = line.rstrip('\r\n').split('=', 1) - values_dict[key] = val + with open(file_name, 'r') as f: + for line in f.readlines(): + key, val = line.rstrip('\r\n').split('=', 1) + values_dict[key] = val def FetchValues(file_list, is_official_build=None): @@ -55,6 +55,15 @@ def FetchValues(file_list, is_official_build=None): for file_name in file_list: FetchValuesFromFile(values, file_name) + script_dirname = os.path.dirname(os.path.realpath(__file__)) + lastchange_filename = os.path.join(script_dirname, "LASTCHANGE") + lastchange_values = {} + FetchValuesFromFile(lastchange_values, lastchange_filename) + + for placeholder_key, placeholder_value in values.items(): + values[placeholder_key] = SubstTemplate(placeholder_value, + lastchange_values) + return values @@ -137,12 +146,10 @@ def BuildParser(): help='Whether the current build should be an official ' 'build, used in addition to the environment ' 'variable.') - parser.add_argument( - '--next', - action='store_true', - help='Whether the current build should be a "next" ' - 'build, which targets pre-release versions of ' - 'Android') + parser.add_argument('--next', + action='store_true', + help='Whether the current build should be a "next" ' + 'build, which targets pre-release versions of Android.') parser.add_argument('args', nargs=argparse.REMAINDER, help='For compatibility: INPUT and OUTPUT can be ' 'passed as positional arguments.') diff --git a/build/util/version_test.py b/build/util/version_test.py index 2a65ddc71638..f9b468f0505d 100644 --- a/build/util/version_test.py +++ b/build/util/version_test.py @@ -1,4 +1,4 @@ -# Copyright 2019 The Chromium Authors. All rights reserved. +# Copyright 2019 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -40,7 +40,6 @@ class _VersionTest(unittest.TestCase): _ANDROID_CHROME_VARS = [ 'chrome_version_code', - 'chrome_modern_version_code', 'monochrome_version_code', 'trichrome_version_code', 'webview_stable_version_code', @@ -95,7 +94,7 @@ def testFetchValuesFromFile(self): result = {} version.FetchValuesFromFile(result, self._CHROME_VERSION_FILE) - for key, val in result.iteritems(): + for key, val in result.items(): self.assertIsInstance(key, str) self.assertIsInstance(val, str) @@ -105,15 +104,12 @@ def testBuildOutputAndroid(self): get_new_args=lambda args: self._EXAMPLE_ANDROID_ARGS) contents = output['contents'] - self.assertRegexpMatches(contents, r'\bchrome_version_code = "\d+"\s') - self.assertRegexpMatches(contents, - r'\bchrome_modern_version_code = "\d+"\s') - self.assertRegexpMatches(contents, r'\bmonochrome_version_code = "\d+"\s') - self.assertRegexpMatches(contents, r'\btrichrome_version_code = "\d+"\s') - self.assertRegexpMatches(contents, - r'\bwebview_stable_version_code = "\d+"\s') - self.assertRegexpMatches(contents, r'\bwebview_beta_version_code = "\d+"\s') - self.assertRegexpMatches(contents, r'\bwebview_dev_version_code = "\d+"\s') + self.assertRegex(contents, r'\bchrome_version_code = "\d+"\s') + self.assertRegex(contents, r'\bmonochrome_version_code = "\d+"\s') + self.assertRegex(contents, r'\btrichrome_version_code = "\d+"\s') + self.assertRegex(contents, r'\bwebview_stable_version_code = "\d+"\s') + self.assertRegex(contents, r'\bwebview_beta_version_code = "\d+"\s') + self.assertRegex(contents, r'\bwebview_dev_version_code = "\d+"\s') def testBuildOutputAndroidArchVariantsArm64(self): """Assert 64-bit-specific version codes""" @@ -129,14 +125,10 @@ def testBuildOutputAndroidArchVariantsArm64(self): output = self._RunBuildOutput(get_new_args=lambda args: new_args) contents = output['contents'] - self.assertRegexpMatches(contents, - r'\bmonochrome_64_32_version_code = "\d+"\s') - self.assertRegexpMatches(contents, - r'\bmonochrome_64_version_code = "\d+"\s') - self.assertRegexpMatches(contents, - r'\btrichrome_64_32_version_code = "\d+"\s') - self.assertRegexpMatches(contents, - r'\btrichrome_64_version_code = "\d+"\s') + self.assertRegex(contents, r'\bmonochrome_64_32_version_code = "\d+"\s') + self.assertRegex(contents, r'\bmonochrome_64_version_code = "\d+"\s') + self.assertRegex(contents, r'\btrichrome_64_32_version_code = "\d+"\s') + self.assertRegex(contents, r'\btrichrome_64_version_code = "\d+"\s') def testBuildOutputAndroidArchVariantsX64(self): """Assert 64-bit-specific version codes""" @@ -152,19 +144,17 @@ def testBuildOutputAndroidArchVariantsX64(self): output = self._RunBuildOutput(get_new_args=lambda args: new_args) contents = output['contents'] - self.assertRegexpMatches(contents, - r'\bmonochrome_64_32_version_code = "\d+"\s') - self.assertRegexpMatches(contents, - r'\bmonochrome_64_version_code = "\d+"\s') - self.assertRegexpMatches(contents, - r'\btrichrome_64_32_version_code = "\d+"\s') - self.assertRegexpMatches(contents, - r'\btrichrome_64_version_code = "\d+"\s') + self.assertRegex(contents, r'\bmonochrome_64_32_version_code = "\d+"\s') + self.assertRegex(contents, r'\bmonochrome_64_version_code = "\d+"\s') + self.assertRegex(contents, r'\btrichrome_64_32_version_code = "\d+"\s') + self.assertRegex(contents, r'\btrichrome_64_version_code = "\d+"\s') def testBuildOutputAndroidChromeArchInput(self): """Assert it raises an exception when using an invalid architecture input""" new_args = _ReplaceArgs(self._EXAMPLE_ANDROID_ARGS, ['-a', 'foobar']) 
- with self.assertRaises(SystemExit) as cm: + # Mock sys.stderr because argparse will print to stderr when we pass + # the invalid '-a' value. + with self.assertRaises(SystemExit) as cm, mock.patch('sys.stderr'): self._RunBuildOutput(get_new_args=lambda args: new_args) self.assertEqual(cm.exception.code, 2) diff --git a/build/util/webkit_version.h.in b/build/util/webkit_version.h.in deleted file mode 100644 index 41960e7d884f..000000000000 --- a/build/util/webkit_version.h.in +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. -// Use of this source is governed by a BSD-style license that can be -// found in the LICENSE file. - -// webkit_version.h is generated from webkit_version.h.in. Edit the source! - -#define WEBKIT_VERSION_MAJOR 537 -#define WEBKIT_VERSION_MINOR 36 -#define WEBKIT_SVN_REVISION "@@LASTCHANGE@" diff --git a/build/vs_toolchain.py b/build/vs_toolchain.py index c3b1182b75b4..a9cd6f03d921 100755 --- a/build/vs_toolchain.py +++ b/build/vs_toolchain.py @@ -1,15 +1,13 @@ -#!/usr/bin/env python -# Copyright 2014 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import collections import glob import json import os -import pipes import platform import re import shutil @@ -19,8 +17,7 @@ from gn_helpers import ToGNString -# VS 2019 16.61 with 10.0.19041 SDK, and 10.0.17134 version of -# d3dcompiler_47.dll, with ARM64 libraries and UWP support. +# VS 2022 17.4 with 10.0.22621.0 SDK with ARM64 libraries and UWP support. # See go/chromium-msvc-toolchain for instructions about how to update the # toolchain. # @@ -29,25 +26,40 @@ # # * //base/win/windows_version.cc NTDDI preprocessor check # Triggers a compiler error if the available SDK is older than the minimum. +# * SDK_VERSION in this file +# Must match the packaged/required SDK version. +# * SDK_VERSION in build/toolchain/win/setup_toolchain.py. # * //build/config/win/BUILD.gn NTDDI_VERSION value # Affects the availability of APIs in the toolchain headers. # * //docs/windows_build_instructions.md mentions of VS or Windows SDK. # Keeps the document consistent with the toolchain version. -TOOLCHAIN_HASH = '20d5f2553f' +# * //tools/win/setenv.py +# Add/remove VS versions when upgrading to a new VS version. +# * MSVC_TOOLSET_VERSION in this file +# Maps between Visual Studio version and MSVC toolset +# * MSVS_VERSIONS in this file +# Records the packaged and default version of Visual Studio +TOOLCHAIN_HASH = '27370823e7' +SDK_VERSION = '10.0.22621.0' script_dir = os.path.dirname(os.path.realpath(__file__)) json_data_file = os.path.join(script_dir, 'win_toolchain.json') # VS versions are listed in descending order of priority (highest first). +# The first version is assumed by this script to be the one that is packaged, +# which makes a difference for the arm64 runtime. MSVS_VERSIONS = collections.OrderedDict([ - ('2019', '16.0'), - ('2017', '15.0'), + ('2022', '17.0'), # Default and packaged version of Visual Studio. + ('2019', '16.0'), + ('2017', '15.0'), ]) # List of preferred VC toolset version based on MSVS +# Order is not relevant for this dictionary. MSVC_TOOLSET_VERSION = { - '2019' : 'VC142', - '2017' : 'VC141', + '2022': 'VC143', + '2019': 'VC142', + '2017': 'VC141', } def _HostIsWindows(): @@ -167,13 +179,17 @@ def GetVisualStudioVersion(): # Checking vs%s_install environment variables. 
# For example, vs2019_install could have the value # "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community". - # Only vs2017_install and vs2019_install are supported. + # Only vs2017_install, vs2019_install and vs2022_install are supported. path = os.environ.get('vs%s_install' % version) if path and os.path.exists(path): available_versions.append(version) break # Detecting VS under possible paths. - path = os.path.expandvars('%ProgramFiles(x86)%' + + if version >= '2022': + program_files_path_variable = '%ProgramFiles%' + else: + program_files_path_variable = '%ProgramFiles(x86)%' + path = os.path.expandvars(program_files_path_variable + '/Microsoft Visual Studio/%s' % version) if path and any( os.path.exists(os.path.join(path, edition)) @@ -200,23 +216,26 @@ def DetectVisualStudioPath(): # the registry. For details see: # https://blogs.msdn.microsoft.com/heaths/2016/09/15/changes-to-visual-studio-15-setup/ # For now we use a hardcoded default with an environment variable override. - for path in ( - os.environ.get('vs%s_install' % version_as_year), - os.path.expandvars('%ProgramFiles(x86)%' + - '/Microsoft Visual Studio/%s/Enterprise' % - version_as_year), - os.path.expandvars('%ProgramFiles(x86)%' + - '/Microsoft Visual Studio/%s/Professional' % - version_as_year), - os.path.expandvars('%ProgramFiles(x86)%' + - '/Microsoft Visual Studio/%s/Community' % - version_as_year), - os.path.expandvars('%ProgramFiles(x86)%' + - '/Microsoft Visual Studio/%s/Preview' % - version_as_year), - os.path.expandvars('%ProgramFiles(x86)%' + - '/Microsoft Visual Studio/%s/BuildTools' % - version_as_year)): + if version_as_year >= '2022': + program_files_path_variable = '%ProgramFiles%' + else: + program_files_path_variable = '%ProgramFiles(x86)%' + for path in (os.environ.get('vs%s_install' % version_as_year), + os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s/Enterprise' % + version_as_year), + os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s/Professional' % + version_as_year), + os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s/Community' % + version_as_year), + os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s/Preview' % + version_as_year), + os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s/BuildTools' % + version_as_year)): if path and os.path.exists(path): return path @@ -289,47 +308,28 @@ def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, suffix): target = os.path.join(target_dir, dll) source = os.path.join(source_dir, dll) _CopyRuntimeImpl(target, source) - # Copy the UCRT files from the Windows SDK. This location includes the - # api-ms-win-crt-*.dll files that are not found in the Windows directory. - # These files are needed for component builds. If WINDOWSSDKDIR is not set - # use the default SDK path. This will be the case when - # DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run. - win_sdk_dir = os.path.normpath( - os.environ.get('WINDOWSSDKDIR', - os.path.expandvars('%ProgramFiles(x86)%' - '\\Windows Kits\\10'))) - # ARM64 doesn't have a redist for the ucrt DLLs because they are always - # present in the OS. - if target_cpu != 'arm64': - # Starting with the 10.0.17763 SDK the ucrt files are in a version-named - # directory - this handles both cases. 
- redist_dir = os.path.join(win_sdk_dir, 'Redist') - version_dirs = glob.glob(os.path.join(redist_dir, '10.*')) - if len(version_dirs) > 0: - _SortByHighestVersionNumberFirst(version_dirs) - redist_dir = version_dirs[0] - ucrt_dll_dirs = os.path.join(redist_dir, 'ucrt', 'DLLs', target_cpu) - ucrt_files = glob.glob(os.path.join(ucrt_dll_dirs, 'api-ms-win-*.dll')) - assert len(ucrt_files) > 0 - for ucrt_src_file in ucrt_files: - file_part = os.path.basename(ucrt_src_file) - ucrt_dst_file = os.path.join(target_dir, file_part) - _CopyRuntimeImpl(ucrt_dst_file, ucrt_src_file, False) - # We must copy ucrtbase.dll for x64/x86, and ucrtbased.dll for all CPU types. - if target_cpu != 'arm64' or not suffix.startswith('.'): - if not suffix.startswith('.'): - # ucrtbased.dll is located at {win_sdk_dir}/bin/{a.b.c.d}/{target_cpu}/ - # ucrt/. - sdk_bin_root = os.path.join(win_sdk_dir, 'bin') - sdk_bin_sub_dirs = glob.glob(os.path.join(sdk_bin_root, '10.*')) - # Select the most recent SDK if there are multiple versions installed. - _SortByHighestVersionNumberFirst(sdk_bin_sub_dirs) - for directory in sdk_bin_sub_dirs: - sdk_redist_root_version = os.path.join(sdk_bin_root, directory) - if not os.path.isdir(sdk_redist_root_version): - continue - source_dir = os.path.join(sdk_redist_root_version, target_cpu, 'ucrt') - break + # We must copy ucrtbased.dll for all CPU types. The rest of the Universal CRT + # is installed as part of the OS in Windows 10 and beyond. + if not suffix.startswith('.'): + win_sdk_dir = os.path.normpath( + os.environ.get( + 'WINDOWSSDKDIR', + os.path.expandvars('%ProgramFiles(x86)%' + '\\Windows Kits\\10'))) + # ucrtbased.dll is located at {win_sdk_dir}/bin/{a.b.c.d}/{target_cpu}/ + # ucrt/. + sdk_bin_root = os.path.join(win_sdk_dir, 'bin') + sdk_bin_sub_dirs = glob.glob(os.path.join(sdk_bin_root, '10.*')) + # Select the most recent SDK if there are multiple versions installed. + _SortByHighestVersionNumberFirst(sdk_bin_sub_dirs) + for directory in sdk_bin_sub_dirs: + sdk_redist_root_version = os.path.join(sdk_bin_root, directory) + if not os.path.isdir(sdk_redist_root_version): + continue + source_dir = os.path.join(sdk_redist_root_version, target_cpu, 'ucrt') + if not os.path.isdir(source_dir): + continue + break _CopyRuntimeImpl(os.path.join(target_dir, 'ucrtbase' + suffix), os.path.join(source_dir, 'ucrtbase' + suffix)) @@ -398,20 +398,33 @@ def CopyDlls(target_dir, configuration, target_cpu): if configuration == 'Debug': _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True) _CopyDebugger(target_dir, target_cpu) + if target_cpu == 'arm64': + target_dir = os.path.join(target_dir, 'win_clang_x64') + target_cpu = 'x64' + runtime_dir = x64_runtime + os.makedirs(target_dir, exist_ok=True) + _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=False) + if configuration == 'Debug': + _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True) + _CopyDebugger(target_dir, target_cpu) def _CopyDebugger(target_dir, target_cpu): - """Copy dbghelp.dll and dbgcore.dll into the requested directory as needed. + """Copy dbghelp.dll, dbgcore.dll, and msdia140.dll into the requested + directory. target_cpu is one of 'x86', 'x64' or 'arm64'. dbghelp.dll is used when Chrome needs to symbolize stacks. Copying this file from the SDK directory avoids using the system copy of dbghelp.dll which then - ensures compatibility with recent debug information formats, such as VS - 2017 /debug:fastlink PDBs. + ensures compatibility with recent debug information formats, such as + large-page PDBs. 
Note that for these DLLs to be deployed to swarming bots they + also need to be listed in group("runtime_libs"). dbgcore.dll is needed when using some functions from dbghelp.dll (like MinidumpWriteDump). + + msdia140.dll is needed for tools like symupload.exe and dump_syms.exe. """ win_sdk_dir = SetEnvironmentAndGetSDKDir() if not win_sdk_dir: @@ -420,23 +433,25 @@ def _CopyDebugger(target_dir, target_cpu): # List of debug files that should be copied, the first element of the tuple is # the name of the file and the second indicates if it's optional. debug_files = [('dbghelp.dll', False), ('dbgcore.dll', True)] - # The UCRT is not a redistributable component on arm64. - if target_cpu != 'arm64': - debug_files.extend([('api-ms-win-downlevel-kernel32-l2-1-0.dll', False), - ('api-ms-win-eventing-provider-l1-1-0.dll', False)]) for debug_file, is_optional in debug_files: full_path = os.path.join(win_sdk_dir, 'Debuggers', target_cpu, debug_file) if not os.path.exists(full_path): if is_optional: continue else: - raise Exception('%s not found in "%s"\r\nYou must install' - 'Windows 10 SDK version 10.0.19041.0 including the ' + raise Exception('%s not found in "%s"\r\nYou must install ' + 'Windows 10 SDK version %s including the ' '"Debugging Tools for Windows" feature.' % - (debug_file, full_path)) + (debug_file, full_path, SDK_VERSION)) target_path = os.path.join(target_dir, debug_file) _CopyRuntimeImpl(target_path, full_path) + # The x64 version of msdia140.dll is always used because symupload and + # dump_syms are always built as x64 binaries. + dia_path = os.path.join(NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH']), + 'DIA SDK', 'bin', 'amd64', 'msdia140.dll') + _CopyRuntimeImpl(os.path.join(target_dir, 'msdia140.dll'), dia_path) + def _GetDesiredVsToolchainHashes(): """Load a list of SHA1s corresponding to the toolchains that we want installed @@ -487,8 +502,7 @@ def Update(force=False, no_download=False): # For testing this block, unmount existing mounts with # fusermount -u third_party/depot_tools/win_toolchain/vs_files if sys.platform.startswith('linux') and not os.path.ismount(toolchain_dir): - import distutils.spawn - ciopfs = distutils.spawn.find_executable('ciopfs') + ciopfs = shutil.which('ciopfs') if not ciopfs: # ciopfs not found in PATH; try the one downloaded from the DEPS hook. ciopfs = os.path.join(script_dir, 'ciopfs') @@ -547,11 +561,13 @@ def GetToolchainDir(): win_sdk_dir = SetEnvironmentAndGetSDKDir() print('''vs_path = %s +sdk_version = %s sdk_path = %s vs_version = %s wdk_dir = %s runtime_dirs = %s -''' % (ToGNString(NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH'])), +''' % (ToGNString(NormalizePath( + os.environ['GYP_MSVS_OVERRIDE_PATH'])), ToGNString(SDK_VERSION), ToGNString(win_sdk_dir), ToGNString(GetVisualStudioVersion()), ToGNString(NormalizePath(os.environ.get('WDK_DIR', ''))), ToGNString(os.path.pathsep.join(runtime_dll_dirs or ['None'])))) diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt index c6768a3aa226..0b23301145f9 100644 --- a/build/whitespace_file.txt +++ b/build/whitespace_file.txt @@ -1,4 +1,4 @@ -Copyright 2014 The Chromium Authors. All rights reserved. +Copyright 2014 The Chromium Authors Use of this useless file is governed by a BSD-style license that can be found in the LICENSE file. @@ -188,3 +188,24 @@ Vestibulum rhoncus neque sodales nibh lobortis, non fringilla odio aliquet. Praesent ultrices quam eu pretium ultrices. Quisque et consequat ex. Curabitur sed nunc neque. 
foo + +And if you go chasing rabbits +And you know you're going to fall +Tell 'em a hookah-smoking caterpillar + +Isn't it supposed to be a whitespace file? +Let's add some " ". + +I'll join to add my first commit here. P. S. It has stopped being a story long long ago. + +PANCAKE RECIPE: +STEP 1: Put 100g plain flour, 2 large eggs, 300ml milk, 1 tbsp sunflower or vegetable oil +and a pinch of salt into a bowl or large jug, then whisk to a smooth batter. +STEP 2: Set aside for 30 mins to rest if you have time, or start cooking straight away. +STEP 3: Set a medium frying pan or crêpe pan over a medium heat and carefully wipe it with some +oiled kitchen paper. +STEP 4: When hot, cook your pancakes for 1 min on each side until golden, keeping them warm +in a low oven as you go. +STEP 5: Serve with lemon wedges and caster sugar, or your favourite filling. +Once cold, you can layer the pancakes between baking parchment, +then wrap in cling film and freeze for up to 2 months. diff --git a/build/win/BUILD.gn b/build/win/BUILD.gn index 6f3dd5342526..cfb8d162f7d3 100644 --- a/build/win/BUILD.gn +++ b/build/win/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -42,54 +42,6 @@ if (is_win) { "$root_out_dir/cdb/winxp/exts.dll", "$root_out_dir/cdb/winxp/ntsdexts.dll", ] - if (current_cpu != "arm64") { - # The UCRT files are not redistributable for ARM64 Win32. - outputs += [ - "$root_out_dir/cdb/api-ms-win-core-console-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-datetime-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-debug-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-errorhandling-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-file-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-file-l1-2-0.dll", - "$root_out_dir/cdb/api-ms-win-core-file-l2-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-handle-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-heap-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-interlocked-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-libraryloader-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-localization-l1-2-0.dll", - "$root_out_dir/cdb/api-ms-win-core-memory-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-namedpipe-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-processenvironment-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-1.dll", - "$root_out_dir/cdb/api-ms-win-core-profile-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-rtlsupport-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-string-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-synch-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-synch-l1-2-0.dll", - "$root_out_dir/cdb/api-ms-win-core-sysinfo-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-timezone-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-core-util-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-conio-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-convert-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-environment-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-filesystem-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-heap-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-locale-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-math-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-multibyte-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-private-l1-1-0.dll", 
- "$root_out_dir/cdb/api-ms-win-crt-process-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-runtime-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-stdio-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-string-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-time-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-crt-utility-l1-1-0.dll", - "$root_out_dir/cdb/api-ms-win-downlevel-kernel32-l2-1-0.dll", - "$root_out_dir/cdb/api-ms-win-eventing-provider-l1-1-0.dll", - "$root_out_dir/cdb/ucrtbase.dll", - ] - } args = [ rebase_path("$root_out_dir/cdb", root_out_dir), current_cpu, @@ -98,7 +50,11 @@ if (is_win) { } group("runtime_libs") { - data = [] + # These are needed for any tests that need to decode stacks. + data = [ + "$root_out_dir/dbghelp.dll", + "$root_out_dir/dbgcore.dll", + ] if (is_component_build) { # Copy the VS runtime DLLs into the isolate so that they don't have to be # preinstalled on the target machine. The debug runtimes have a "d" at @@ -127,56 +83,6 @@ if (is_win) { "ASan is only supported in 64-bit builds on Windows.") data += [ "$clang_base_path/lib/clang/$clang_version/lib/windows/clang_rt.asan_dynamic-x86_64.dll" ] } - if (current_cpu != "arm64") { - data += [ - # Universal Windows 10 CRT files - "$root_out_dir/api-ms-win-core-console-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-datetime-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-debug-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-errorhandling-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-file-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-file-l1-2-0.dll", - "$root_out_dir/api-ms-win-core-file-l2-1-0.dll", - "$root_out_dir/api-ms-win-core-handle-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-heap-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-interlocked-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-libraryloader-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-localization-l1-2-0.dll", - "$root_out_dir/api-ms-win-core-memory-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-namedpipe-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-processenvironment-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-processthreads-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-processthreads-l1-1-1.dll", - "$root_out_dir/api-ms-win-core-profile-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-rtlsupport-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-string-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-synch-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-synch-l1-2-0.dll", - "$root_out_dir/api-ms-win-core-sysinfo-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-timezone-l1-1-0.dll", - "$root_out_dir/api-ms-win-core-util-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-conio-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-convert-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-environment-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-filesystem-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-heap-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-locale-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-math-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-multibyte-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-private-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-process-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-runtime-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-stdio-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-string-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-time-l1-1-0.dll", - "$root_out_dir/api-ms-win-crt-utility-l1-1-0.dll", - "$root_out_dir/api-ms-win-downlevel-kernel32-l2-1-0.dll", - "$root_out_dir/api-ms-win-eventing-provider-l1-1-0.dll", - ] - if (!is_debug) { - data += [ 
"$root_out_dir/ucrtbase.dll" ] - } - } } } } diff --git a/build/win/compatibility.manifest b/build/win/compatibility.manifest index 10d10da3826b..755c272c671d 100644 --- a/build/win/compatibility.manifest +++ b/build/win/compatibility.manifest @@ -12,6 +12,9 @@ + + + diff --git a/build/win/copy_cdb_to_output.py b/build/win/copy_cdb_to_output.py index a0b99bb7764f..5d124403340a 100755 --- a/build/win/copy_cdb_to_output.py +++ b/build/win/copy_cdb_to_output.py @@ -1,9 +1,8 @@ -#!/usr/bin/env python -# Copyright 2016 The Chromium Authors. All rights reserved. +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import print_function import glob import hashlib @@ -101,17 +100,6 @@ def _CopyCDBToOutput(output_dir, target_arch): _CopyImpl('uext.dll', dst_winext_dir, src_winext_dir) _CopyImpl('exts.dll', dst_winxp_dir, src_winxp_dir) _CopyImpl('ntsdexts.dll', dst_winxp_dir, src_winxp_dir) - if src_arch in ['x64', 'x86']: - # Copy all UCRT files from the debuggers directory, for compatibility with - # the Windows 10 18362 SDK (one UCRT file) and later versions (two UCRT - # files). The new file is api-ms-win-downlevel-kernel32-l2-1-0.dll and - # should be added to the copy_cdb_to_output outputs when we require a newer - # SDK. - for file in glob.glob(os.path.join(src_dir, 'api-ms-win*.dll')): - _CopyImpl(os.path.split(file)[1], output_dir, src_dir) - _CopyImpl('ucrtbase.dll', output_dir, src_crt_dir) - for dll_path in glob.glob(os.path.join(src_crt_dir, 'api-ms-win-*.dll')): - _CopyImpl(os.path.split(dll_path)[1], output_dir, src_crt_dir) return 0 diff --git a/build/win/gn_meta_sln.py b/build/win/gn_meta_sln.py index 862d278248f4..2165a1301f98 100644 --- a/build/win/gn_meta_sln.py +++ b/build/win/gn_meta_sln.py @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # @@ -6,7 +6,6 @@ # Helper utility to combine GN-generated Visual Studio projects into # a single meta-solution. -from __future__ import print_function import os import glob diff --git a/build/win/message_compiler.gni b/build/win/message_compiler.gni index 0ddbc1f424e6..33ced1cb7fb3 100644 --- a/build/win/message_compiler.gni +++ b/build/win/message_compiler.gni @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/build/win/message_compiler.py b/build/win/message_compiler.py index 51de52f0fca0..9759c1fd1a4f 100644 --- a/build/win/message_compiler.py +++ b/build/win/message_compiler.py @@ -1,4 +1,4 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. +# Copyright 2015 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -6,7 +6,6 @@ # # Usage: message_compiler.py [*] -from __future__ import print_function import difflib import distutils.dir_util diff --git a/build/win/reorder-imports.py b/build/win/reorder-imports.py index ee27ed19cc27..7dd8e1d8babb 100755 --- a/build/win/reorder-imports.py +++ b/build/win/reorder-imports.py @@ -1,5 +1,5 @@ -#!/usr/bin/env python -# Copyright 2014 The Chromium Authors. All rights reserved. 
+#!/usr/bin/env python3
+# Copyright 2014 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,8 +10,10 @@
 import subprocess
 import sys
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..',
-                                'third_party', 'pefile'))
+sys.path.insert(
+    0,
+    os.path.join(os.path.dirname(__file__), '..', '..', 'third_party',
+                 'pefile_py3'))
 import pefile

 def reorder_imports(input_dir, output_dir, architecture):
@@ -46,7 +48,7 @@ def reorder_imports(input_dir, output_dir, architecture):
   found_elf = False
   for i, peimport in enumerate(pe.DIRECTORY_ENTRY_IMPORT):
-    if peimport.dll.lower() == 'chrome_elf.dll':
+    if peimport.dll.lower() == b'chrome_elf.dll':
       assert not found_elf, 'only one chrome_elf.dll import expected'
       found_elf = True
       if i > 0:
diff --git a/build/win/set_appcontainer_acls.py b/build/win/set_appcontainer_acls.py
index 4a4e616cf2eb..06a1819548fa 100755
--- a/build/win/set_appcontainer_acls.py
+++ b/build/win/set_appcontainer_acls.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# Copyright 2021 The Chromium Authors. All rights reserved.
+# Copyright 2021 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 """Sets the app container ACLs on a directory."""
diff --git a/build/win/use_ansi_codes.py b/build/win/use_ansi_codes.py
index 5951c2ab4d6e..dfc3c3322596 100755
--- a/build/win/use_ansi_codes.py
+++ b/build/win/use_ansi_codes.py
@@ -1,10 +1,9 @@
-#!/usr/bin/env python
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2015 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 """Prints if the terminal is likely to understand ANSI codes."""
-from __future__ import print_function

 import os
diff --git a/build/write_build_date_header.py b/build/write_build_date_header.py
deleted file mode 100755
index 77388288b3bb..000000000000
--- a/build/write_build_date_header.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Takes a timestamp and writes it in as readable text to a .h file."""
-
-import argparse
-import datetime
-import os
-import sys
-
-
-def main():
-  argument_parser = argparse.ArgumentParser()
-  argument_parser.add_argument('output_file', help='The file to write to')
-  argument_parser.add_argument('timestamp')
-  args = argument_parser.parse_args()
-
-  date = datetime.datetime.utcfromtimestamp(int(args.timestamp))
-  output = ('// Generated by //build/write_build_date_header.py\n'
-            '#ifndef BUILD_DATE\n'
-            '#define BUILD_DATE "{:%b %d %Y %H:%M:%S}"\n'
-            '#endif // BUILD_DATE\n'.format(date))
-
-  current_contents = ''
-  if os.path.isfile(args.output_file):
-    with open(args.output_file, 'r') as current_file:
-      current_contents = current_file.read()
-
-  if current_contents != output:
-    with open(args.output_file, 'w') as output_file:
-      output_file.write(output)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/write_buildflag_header.py b/build/write_buildflag_header.py
index 47b9a0326505..89a073761e25 100755
--- a/build/write_buildflag_header.py
+++ b/build/write_buildflag_header.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2015 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,8 +15,8 @@
 import optparse
 import os
+import re
 import shlex
-import sys


 class Options:
@@ -39,10 +39,12 @@ def GetOptions():
                     help="Name of the response file containing the flags.")

   cmdline_options, cmdline_flags = parser.parse_args()

-  # Compute header guard by replacing some chars with _ and upper-casing.
+  # Compute a valid C++ header guard by replacing invalid chars with '_',
+  # upper-casing everything, and prepending '_' if the first symbol is a digit.
   header_guard = cmdline_options.output.upper()
-  header_guard = \
-      header_guard.replace('/', '_').replace('\\', '_').replace('.', '_')
+  if header_guard[0].isdigit():
+    header_guard = '_' + header_guard
+  header_guard = re.sub(r'[^\w]', '_', header_guard)
   header_guard += '_'

   # The actual output file is inside the gen dir.
@@ -83,7 +85,7 @@ def WriteHeader(options):
     output_file.write('\n#ifndef %s\n' % options.header_guard)
     output_file.write('#define %s\n\n' % options.header_guard)

-    output_file.write('#include "build/buildflag.h"\n\n')
+    output_file.write('#include "build/buildflag.h" // IWYU pragma: export\n\n')

     for pair in options.flags:
       output_file.write('#define BUILDFLAG_INTERNAL_%s() (%s)\n' % pair)
@@ -91,13 +93,5 @@
     output_file.write('\n#endif // %s\n' % options.header_guard)


-if os.name == 'nt':
-  major, minor, build, platform, service_pack = sys.getwindowsversion()
-  # Windows 10 will be 6.2 on Python 2 and 10.0 on Python 3. This check
-  # handles both.
-  if major < 6 or (major == 6 and minor < 2):
-    raise Exception(
-        'Unsupported OS. Building Chromium requires Windows 10. %s detected.' %
-        str(sys.getwindowsversion()))
 options = GetOptions()
 WriteHeader(options)
diff --git a/build/xcode_binaries.yaml b/build/xcode_binaries.yaml
index 779b3ac15f49..b9069f4b41f9 100644
--- a/build/xcode_binaries.yaml
+++ b/build/xcode_binaries.yaml
@@ -8,9 +8,9 @@
 # To deploy the newly created cipd package across the fleet, modify
 # mac_toolchain.py to point to the new cipd hash.
 #
-# Note that runhooks extracts the cipd file to build/mac/xcode_binaries -- your
-# build/xcode_binaries you're creating in step 1 above isn't used as part of
-# the Chromium build, build/mac_files/xcode_binaries is. So you need to
+# Note that runhooks extracts the cipd file to build/mac_files/xcode_binaries
+# -- your build/xcode_binaries you're creating in step 1 above isn't used as
+# part of the Chromium build, build/mac_files/xcode_binaries is. So you need to
 # `runhooks` after updating the hash in mac_toolchain.py like everyone else to
 # get the new bits for your local build.
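The write_buildflag_header.py hunk above now derives the header guard with a regex instead of chained replace() calls. A worked example of that computation, using an illustrative output path:

    import re

    output = '9gen/base/test_buildflags.h'  # illustrative path only
    guard = output.upper()
    if guard[0].isdigit():
      guard = '_' + guard  # C++ identifiers cannot start with a digit
    guard = re.sub(r'[^\w]', '_', guard) + '_'
    print(guard)  # prints _9GEN_BASE_TEST_BUILDFLAGS_H_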
# @@ -25,6 +25,7 @@ data: - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/bison - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/gm4 - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/gperf + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/install_name_tool - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/libtool - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/lipo @@ -45,7 +46,8 @@ data: - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/libtapi.dylib - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/libexec/migcom - file: Contents/Developer/usr/bin/atos - - file: Contents/Resources/English.lproj/License.rtf + - file: Contents/Developer/usr/bin/notarytool + - file: Contents/Resources/en.lproj/License.rtf - file: Contents/Resources/LicenseInfo.plist - dir: Contents/SharedFrameworks/CoreSymbolicationDT.framework - dir: Contents/SharedFrameworks/DebugSymbolsDT.framework diff --git a/build/zip_helpers.py b/build/zip_helpers.py new file mode 100644 index 000000000000..b8ab9dd0baba --- /dev/null +++ b/build/zip_helpers.py @@ -0,0 +1,238 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Helper functions for dealing with .zip files.""" + +import os +import pathlib +import posixpath +import stat +import time +import zipfile + +_FIXED_ZIP_HEADER_LEN = 30 + + +def _set_alignment(zip_obj, zip_info, alignment): + """Sets a ZipInfo's extra field such that the file will be aligned. + + Args: + zip_obj: The ZipFile object that is being written. + zip_info: The ZipInfo object about to be written. + alignment: The amount of alignment (e.g. 4, or 4*1024). + """ + header_size = _FIXED_ZIP_HEADER_LEN + len(zip_info.filename) + pos = zip_obj.fp.tell() + header_size + padding_needed = (alignment - (pos % alignment)) % alignment + + # Python writes |extra| to both the local file header and the central + # directory's file header. Android's zipalign tool writes only to the + # local file header, so there is more overhead in using Python to align. + zip_info.extra = b'\0' * padding_needed + + +def _hermetic_date_time(timestamp=None): + if not timestamp: + return (2001, 1, 1, 0, 0, 0) + utc_time = time.gmtime(timestamp) + return (utc_time.tm_year, utc_time.tm_mon, utc_time.tm_mday, utc_time.tm_hour, + utc_time.tm_min, utc_time.tm_sec) + + +def add_to_zip_hermetic(zip_file, + zip_path, + *, + src_path=None, + data=None, + compress=None, + alignment=None, + timestamp=None): + """Adds a file to the given ZipFile with a hard-coded modified time. + + Args: + zip_file: ZipFile instance to add the file to. + zip_path: Destination path within the zip file (or ZipInfo instance). + src_path: Path of the source file. Mutually exclusive with |data|. + data: File data as a string. + compress: Whether to enable compression. Default is taken from ZipFile + constructor. + alignment: If set, align the data of the entry to this many bytes. + timestamp: The last modification date and time for the archive member. 
+ """ + assert (src_path is None) != (data is None), ( + '|src_path| and |data| are mutually exclusive.') + if isinstance(zip_path, zipfile.ZipInfo): + zipinfo = zip_path + zip_path = zipinfo.filename + else: + zipinfo = zipfile.ZipInfo(filename=zip_path) + zipinfo.external_attr = 0o644 << 16 + + zipinfo.date_time = _hermetic_date_time(timestamp) + + if alignment: + _set_alignment(zip_file, zipinfo, alignment) + + # Filenames can contain backslashes, but it is more likely that we've + # forgotten to use forward slashes as a directory separator. + assert '\\' not in zip_path, 'zip_path should not contain \\: ' + zip_path + assert not posixpath.isabs(zip_path), 'Absolute zip path: ' + zip_path + assert not zip_path.startswith('..'), 'Should not start with ..: ' + zip_path + assert posixpath.normpath(zip_path) == zip_path, ( + f'Non-canonical zip_path: {zip_path} vs: {posixpath.normpath(zip_path)}') + assert zip_path not in zip_file.namelist(), ( + 'Tried to add a duplicate zip entry: ' + zip_path) + + if src_path and os.path.islink(src_path): + zipinfo.external_attr |= stat.S_IFLNK << 16 # mark as a symlink + zip_file.writestr(zipinfo, os.readlink(src_path)) + return + + # Maintain the executable bit. + if src_path: + st = os.stat(src_path) + for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH): + if st.st_mode & mode: + zipinfo.external_attr |= mode << 16 + + if src_path: + with open(src_path, 'rb') as f: + data = f.read() + + # zipfile will deflate even when it makes the file bigger. To avoid + # growing files, disable compression at an arbitrary cut off point. + if len(data) < 16: + compress = False + + # None converts to ZIP_STORED, when passed explicitly rather than the + # default passed to the ZipFile constructor. + compress_type = zip_file.compression + if compress is not None: + compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED + zip_file.writestr(zipinfo, data, compress_type) + + +def add_files_to_zip(inputs, + output, + *, + base_dir=None, + compress=None, + zip_prefix_path=None, + timestamp=None): + """Creates a zip file from a list of files. + + Args: + inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples. + output: Path, fileobj, or ZipFile instance to add files to. + base_dir: Prefix to strip from inputs. + compress: Whether to compress + zip_prefix_path: Path prepended to file path in zip file. + timestamp: Unix timestamp to use for files in the archive. + """ + if base_dir is None: + base_dir = '.' + input_tuples = [] + for tup in inputs: + if isinstance(tup, str): + src_path = tup + zip_path = os.path.relpath(src_path, base_dir) + # Zip files always use / as path separator. + if os.path.sep != posixpath.sep: + zip_path = str(pathlib.Path(zip_path).as_posix()) + tup = (zip_path, src_path) + input_tuples.append(tup) + + # Sort by zip path to ensure stable zip ordering. 
+ input_tuples.sort(key=lambda tup: tup[0]) + + out_zip = output + if not isinstance(output, zipfile.ZipFile): + out_zip = zipfile.ZipFile(output, 'w') + + try: + for zip_path, fs_path in input_tuples: + if zip_prefix_path: + zip_path = posixpath.join(zip_prefix_path, zip_path) + add_to_zip_hermetic(out_zip, + zip_path, + src_path=fs_path, + compress=compress, + timestamp=timestamp) + finally: + if output is not out_zip: + out_zip.close() + + +def zip_directory(output, base_dir, **kwargs): + """Zips all files in the given directory.""" + inputs = [] + for root, _, files in os.walk(base_dir): + for f in files: + inputs.append(os.path.join(root, f)) + + add_files_to_zip(inputs, output, base_dir=base_dir, **kwargs) + + +def merge_zips(output, input_zips, path_transform=None, compress=None): + """Combines all files from |input_zips| into |output|. + + Args: + output: Path, fileobj, or ZipFile instance to add files to. + input_zips: Iterable of paths to zip files to merge. + path_transform: Called for each entry path. Returns a new path, or None to + skip the file. + compress: Overrides compression setting from origin zip entries. + """ + assert not isinstance(input_zips, str) # Easy mistake to make. + if isinstance(output, zipfile.ZipFile): + out_zip = output + out_filename = output.filename + else: + assert isinstance(output, str), 'Was: ' + repr(output) + out_zip = zipfile.ZipFile(output, 'w') + out_filename = output + + # Include paths in the existing zip here to avoid adding duplicate files. + crc_by_name = {i.filename: (out_filename, i.CRC) for i in out_zip.infolist()} + + try: + for in_file in input_zips: + with zipfile.ZipFile(in_file, 'r') as in_zip: + for info in in_zip.infolist(): + # Ignore directories. + if info.filename[-1] == '/': + continue + if path_transform: + dst_name = path_transform(info.filename) + if dst_name is None: + continue + else: + dst_name = info.filename + + data = in_zip.read(info) + + # If there's a duplicate file, ensure contents is the same and skip + # adding it multiple times. + if dst_name in crc_by_name: + orig_filename, orig_crc = crc_by_name[dst_name] + new_crc = zipfile.crc32(data) + if new_crc == orig_crc: + continue + msg = f"""File appeared in multiple inputs with differing contents. +File: {dst_name} +Input1: {orig_filename} +Input2: {in_file}""" + raise Exception(msg) + + if compress is not None: + compress_entry = compress + else: + compress_entry = info.compress_type != zipfile.ZIP_STORED + add_to_zip_hermetic(out_zip, + dst_name, + data=data, + compress=compress_entry) + crc_by_name[dst_name] = (in_file, out_zip.getinfo(dst_name).CRC) + finally: + if output is not out_zip: + out_zip.close() diff --git a/build/zip_helpers_unittest.py b/build/zip_helpers_unittest.py new file mode 100755 index 000000000000..19000273b5ca --- /dev/null +++ b/build/zip_helpers_unittest.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
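The padding formula in _set_alignment above aligns the start of each entry's data, not its header. A worked example, assuming the local file header happens to begin at offset 1:

    # _FIXED_ZIP_HEADER_LEN is 30; the filename 'lib.so' adds 6 bytes.
    alignment = 4
    pos = 1 + 30 + len('lib.so')  # data would start at offset 37
    padding_needed = (alignment - (pos % alignment)) % alignment
    assert padding_needed == 3  # 37 + 3 == 40, a multiple of 4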
+
+import os
+import pathlib
+import shutil
+import sys
+import tempfile
+import unittest
+import zipfile
+
+import zip_helpers
+
+
+def _make_test_zips(tmp_dir, create_conflict=False):
+  zip1 = os.path.join(tmp_dir, 'A.zip')
+  zip2 = os.path.join(tmp_dir, 'B.zip')
+  with zipfile.ZipFile(zip1, 'w') as z:
+    z.writestr('file1', 'AAAAA')
+    z.writestr('file2', 'BBBBB')
+  with zipfile.ZipFile(zip2, 'w') as z:
+    z.writestr('file2', 'ABABA' if create_conflict else 'BBBBB')
+    z.writestr('file3', 'CCCCC')
+  return zip1, zip2
+
+
+class ZipHelpersTest(unittest.TestCase):
+  def test_merge_zips__identical_file(self):
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      zip1, zip2 = _make_test_zips(tmp_dir)
+
+      merged_zip = os.path.join(tmp_dir, 'merged.zip')
+      zip_helpers.merge_zips(merged_zip, [zip1, zip2])
+
+      with zipfile.ZipFile(merged_zip) as z:
+        self.assertEqual(z.namelist(), ['file1', 'file2', 'file3'])
+
+  def test_merge_zips__conflict(self):
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      zip1, zip2 = _make_test_zips(tmp_dir, create_conflict=True)
+
+      merged_zip = os.path.join(tmp_dir, 'merged.zip')
+      with self.assertRaises(Exception):
+        zip_helpers.merge_zips(merged_zip, [zip1, zip2])
+
+  def test_merge_zips__conflict_with_append(self):
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      zip1, zip2 = _make_test_zips(tmp_dir, create_conflict=True)
+
+      with self.assertRaises(Exception):
+        with zipfile.ZipFile(zip1, 'a') as dst_zip:
+          zip_helpers.merge_zips(dst_zip, [zip2])
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build_overrides/build.gni b/build_overrides/build.gni
index c69e1f649542..b5c8bd720371 100644
--- a/build_overrides/build.gni
+++ b/build_overrides/build.gni
@@ -13,12 +13,14 @@
 # limitations under the License.

 build_with_chromium = false
+is_nacl_nonsfi = false

 declare_args() {
   # Android 32-bit non-component, non-clang builds cannot have symbol_level=2
   # due to 4GiB file size limit, see https://crbug.com/648948.
   # Set this flag to true to skip the assertion.
   ignore_elf32_limitations = false
+  is_chromecast = false
 }

 enable_java_templates = false
diff --git a/build_overrides/crypto.gni b/build_overrides/crypto.gni
new file mode 100644
index 000000000000..ff40f763c88d
--- /dev/null
+++ b/build_overrides/crypto.gni
@@ -0,0 +1,17 @@
+# Copyright 2024 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+declare_args() {
+  use_nss_certs = false
+}
diff --git a/build/util/is_python2.py b/build_overrides/util/is_python2.py
similarity index 77%
rename from build/util/is_python2.py
rename to build_overrides/util/is_python2.py
index 83a407ef4f9e..fbfa431ae76f 100644
--- a/build/util/is_python2.py
+++ b/build_overrides/util/is_python2.py
@@ -1,11 +1,10 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2024 The Cobalt Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
"""Script for checking if we're running Python 2 or 3.""" from __future__ import print_function -import subprocess import sys print("true" if sys.version_info.major == 2 else "false") diff --git a/crypto/BUILD.gn b/crypto/BUILD.gn index a8d6f2b7a30b..4208bb092d09 100644 --- a/crypto/BUILD.gn +++ b/crypto/BUILD.gn @@ -2,7 +2,7 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -import("//build/config/crypto.gni") +import("//build_overrides/crypto.gni") import("//testing/test.gni") component("crypto") { diff --git a/starboard/build/config/BUILDCONFIG.gn b/starboard/build/config/BUILDCONFIG.gn index f17850279d29..411e1c2687c5 100644 --- a/starboard/build/config/BUILDCONFIG.gn +++ b/starboard/build/config/BUILDCONFIG.gn @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -_is_python2 = exec_script("//build/util/is_python2.py", [], "json") +_is_python2 = exec_script("//build_overrides/util/is_python2.py", [], "json") assert(!_is_python2, "`python` must resolve to Python 3 when building with GN.") declare_args() { diff --git a/starboard/linux/x64x11/clang/3.9/platform_configuration/BUILD.gn b/starboard/linux/x64x11/clang/3.9/platform_configuration/BUILD.gn index b2037fdd0b9c..6d6b6bd9f81f 100644 --- a/starboard/linux/x64x11/clang/3.9/platform_configuration/BUILD.gn +++ b/starboard/linux/x64x11/clang/3.9/platform_configuration/BUILD.gn @@ -97,6 +97,9 @@ config("compiler_flags") { # Suppress warnings in libjpeg "-Wno-shift-negative-value", + + # Let older Clangs ignore newer Clangs' warnings. + "-Wno-unknown-warning-option", ] cflags_c += [ diff --git a/third_party/angle/gni/angle.gni b/third_party/angle/gni/angle.gni index 75ea0c5ae4ad..26d0cff78002 100644 --- a/third_party/angle/gni/angle.gni +++ b/third_party/angle/gni/angle.gni @@ -38,7 +38,7 @@ if (angle_has_build) { if (build_with_chromium) { import("//ui/ozone/ozone.gni") -} else { +} else if (!defined(ozone_platform_gbm)) { declare_args() { ozone_platform_gbm = false }